Commit

Merge pull request #116 from outbrain/general-fixes
0 warning compilation
SkBlaz authored Sep 5, 2023
2 parents 7321d09 + 8235a50 commit 572a99e
Showing 21 changed files with 49 additions and 15 deletions.
4 changes: 3 additions & 1 deletion src/block_ffm.rs
@@ -1,3 +1,5 @@
#![allow(dead_code,unused_imports, unused_mut, invalid_value)]

use core::arch::x86_64::*;
use rustc_hash::FxHashSet;
use std::any::Any;
@@ -283,7 +285,7 @@ impl<L: OptimizerTrait + 'static> BlockTrait for BlockFFM<L> {
fb.ffm_buffer.len() * (self.ffm_k * self.ffm_num_fields) as usize;
if local_data_ffm_len < FFM_STACK_BUF_LEN {
// Fast-path - using on-stack data structures
let mut local_data_ffm_values: [f32; FFM_STACK_BUF_LEN as usize] =
let local_data_ffm_values: [f32; FFM_STACK_BUF_LEN as usize] =
MaybeUninit::uninit().assume_init();
core_macro!(local_data_ffm_values);
} else {
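
The fast-path comment above refers to picking an on-stack buffer when the required length fits under FFM_STACK_BUF_LEN and falling back to the heap otherwise; the changed line itself only drops a mut from the fixed-size binding. A rough, self-contained sketch of that stack-or-heap pattern (the constant value and function name are illustrative, not the crate's; the sketch zero-initializes where the crate uses MaybeUninit::uninit().assume_init() under the newly allowed invalid_value lint):

const STACK_BUF_LEN: usize = 1024; // illustrative stand-in for FFM_STACK_BUF_LEN

fn with_scratch_buffer(values_needed: usize, f: impl FnOnce(&mut [f32])) {
    if values_needed < STACK_BUF_LEN {
        // Fast path: a fixed-size array lives on the stack, so no per-call heap allocation.
        let mut stack_buf = [0.0f32; STACK_BUF_LEN];
        f(&mut stack_buf[..values_needed]);
    } else {
        // Slow path: fall back to a heap-allocated Vec for oversized inputs.
        let mut heap_buf = vec![0.0f32; values_needed];
        f(&mut heap_buf);
    }
}
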
2 changes: 2 additions & 0 deletions src/block_helpers.rs
@@ -1,3 +1,5 @@
#![allow(dead_code,unused_imports)]

use crate::optimizer::OptimizerTrait;
use std::error::Error;
use std::io;
2 changes: 2 additions & 0 deletions src/block_misc.rs
@@ -1,3 +1,5 @@
#![allow(dead_code,unused_imports)]

use std::any::Any;
use std::error::Error;

9 changes: 4 additions & 5 deletions src/block_neural.rs
@@ -1,3 +1,5 @@
#![allow(dead_code,unused_imports)]

use rand_distr::{Distribution, Normal, Uniform};
use rand_xoshiro::rand_core::SeedableRng;
use rand_xoshiro::Xoshiro256PlusPlus;
@@ -253,11 +255,8 @@ impl<L: OptimizerTrait + 'static> BlockTrait for BlockNeuronLayer<L> {
unsafe {
if update && self.neuron_type == NeuronType::WeightedSum {
// first we need to initialize inputs to zero
// TODO - what to think about this buffer
let mut output_errors: [f32; MAX_NUM_INPUTS] = MaybeUninit::uninit().assume_init();
output_errors
.get_unchecked_mut(0..self.num_inputs)
.fill(0.0);

let mut output_errors: [f32; MAX_NUM_INPUTS] = [0.0; MAX_NUM_INPUTS];

let (input_tape, output_tape) = block_helpers::get_input_output_borrows(
&mut pb.tape,
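
The replaced lines above swap an uninitialized buffer that was then partially filled with zeros for a plain zero-initialized array literal, which expresses the initialize-inputs-to-zero intent in one safe step. A small illustrative sketch (the MAX_NUM_INPUTS value and function name are made up for the example):

const MAX_NUM_INPUTS: usize = 8; // illustrative; not the crate's value

fn accumulate_errors(num_inputs: usize) -> f32 {
    let num_inputs = num_inputs.min(MAX_NUM_INPUTS);
    // The array literal zero-initializes the whole fixed-size buffer up front, replacing the
    // MaybeUninit::uninit().assume_init() + fill(0.0) two-step in the removed lines.
    let mut output_errors: [f32; MAX_NUM_INPUTS] = [0.0; MAX_NUM_INPUTS];
    // Only the first num_inputs slots are used afterwards, as in the original fill range.
    for e in output_errors.iter_mut().take(num_inputs) {
        *e += 1.0; // stand-in for accumulating backpropagated error terms
    }
    output_errors[..num_inputs].iter().sum()
}
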
2 changes: 2 additions & 0 deletions src/block_relu.rs
@@ -1,3 +1,5 @@
#![allow(dead_code,unused_imports)]

use std::any::Any;
use std::error::Error;

2 changes: 2 additions & 0 deletions src/cache.rs
@@ -1,3 +1,5 @@
#![allow(dead_code,unused_imports)]

use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
use std::error::Error;
use std::fs;
2 changes: 2 additions & 0 deletions src/cmdline.rs
@@ -1,3 +1,5 @@
#![allow(dead_code,unused_imports)]

use crate::version;
use clap::{App, AppSettings, Arg};

2 changes: 2 additions & 0 deletions src/feature_buffer.rs
@@ -1,3 +1,5 @@
#![allow(dead_code,unused_imports)]

use crate::feature_transform_executor;
use crate::model_instance;
use crate::parser;
2 changes: 2 additions & 0 deletions src/feature_transform_executor.rs
@@ -1,3 +1,5 @@
#![allow(dead_code,unused_imports)]

use crate::parser;
use crate::vwmap;
use std::error::Error;
6 changes: 4 additions & 2 deletions src/feature_transform_implementations.rs
@@ -1,3 +1,5 @@
#![allow(dead_code,unused_imports)]

use std::error::Error;
use std::io::Error as IOError;
use std::io::ErrorKind;
@@ -216,14 +218,14 @@ impl FunctionExecutorTrait for TransformerLogRatioBinner {
feature_reader_float_namespace!(
record_buffer,
self.from_namespace1.namespace_descriptor,
hash_index1,
_hash_index1,
hash_value1,
float_value1,
{
feature_reader_float_namespace!(
record_buffer,
self.from_namespace2.namespace_descriptor,
hash_index2,
_hash_index2,
hash_value2,
float_value2,
{
5 changes: 3 additions & 2 deletions src/feature_transform_parser.rs
@@ -1,5 +1,6 @@
//#[macro_use]
//extern crate nom;
#![allow(dead_code,unused_imports)]

use crate::vwmap;
use serde::{Deserialize, Serialize};
@@ -68,7 +69,7 @@ impl NamespaceTransformsParser {
}
let (
_,
(to_namespace_verbose, function_name, from_namespaces_verbose, function_parameters),
(to_namespace_verbose, _function_name, from_namespaces_verbose, _function_parameters),
) = rr.unwrap();

// Here we just check for clashes with namespaces from input file
@@ -242,7 +243,7 @@ impl NamespaceTransforms {
}

pub fn get_namespace_descriptor(
transform_namespaces: &NamespaceTransforms,
_transform_namespaces: &NamespaceTransforms,
vw: &vwmap::VwNamespaceMap,
namespace_char: char,
) -> Result<vwmap::NamespaceDescriptor, Box<dyn Error>> {
2 changes: 2 additions & 0 deletions src/graph.rs
@@ -1,3 +1,5 @@
#![allow(dead_code,unused_imports)]

use crate::block_misc;
use crate::model_instance;
use crate::port_buffer;
5 changes: 2 additions & 3 deletions src/main.rs
@@ -1,9 +1,8 @@
#![allow(dead_code)]
#![allow(unused_imports)]
#![allow(unused_variables)]
#![allow(unused_mut)]
#![allow(non_snake_case)]
#![allow(redundant_semicolons)]
#![allow(dead_code,unused_imports)]

use crate::hogwild::HogwildTrainer;
use crate::multithread_helpers::BoxedRegressorTrait;
@@ -107,7 +106,7 @@ fn build_cache_without_training(cl: clap::ArgMatches) -> Result<(), Box<dyn Erro
}
} else {
reading_result = cache.get_next_record();
buffer = match reading_result {
match reading_result {
Ok([]) => break, // EOF
Ok(buffer) => buffer,
Err(_e) => return Err(_e),
2 changes: 2 additions & 0 deletions src/model_instance.rs
@@ -1,3 +1,5 @@
#![allow(dead_code,unused_imports)]

use std::error::Error;
use std::io::Error as IOError;
use std::io::ErrorKind;
5 changes: 3 additions & 2 deletions src/optimizer.rs
@@ -1,3 +1,4 @@
#![allow(dead_code,unused_imports)]
use std::marker::PhantomData;

pub trait OptimizerTrait: std::clone::Clone {
@@ -123,9 +124,9 @@ impl OptimizerTrait for OptimizerAdagradLUT {
let minus_power_t = -power_t;
for x in 0..FASTMATH_LR_LUT_SIZE {
// accumulated gradients are always positive floating points, sign is guaranteed to be zero
// floating point: 1 bit of sign, 7 bits of signed expontent then floating point bits (mantissa)
// floating point: 1 bit of sign, 7 bits of signed exponent then floating point bits (mantissa)
// we will take 7 bits of exponent + whatever most significant bits of mantissa remain
// we take two consequtive such values, so we act as if had rounding
// we take two consequtive such values, so we act as if it had rounding
let float_x =
(f32::from_bits((x as u32) << (31 - FASTMATH_LR_LUT_BITS))) + initial_acc_gradient;
let float_x_plus_one =
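
The comments in this hunk describe the lookup-table trick used by OptimizerAdagradLUT: accumulated gradients are never negative, so the sign bit is always zero and the top bits of the f32 bit pattern (exponent plus the leading mantissa bits) can index a precomputed table of learning-rate factors, with two consecutive representative values averaged to approximate rounding. A rough, self-contained sketch of that idea (LUT_BITS and the function names are illustrative stand-ins, not the crate's constants):

const LUT_BITS: u32 = 11; // illustrative stand-in for FASTMATH_LR_LUT_BITS
const LUT_SIZE: usize = 1 << LUT_BITS;

fn build_lut(power_t: f32, initial_acc_gradient: f32) -> Vec<f32> {
    let mut lut = vec![0.0f32; LUT_SIZE];
    for x in 0..LUT_SIZE {
        // Reconstruct representative floats whose top LUT_BITS bits equal the index, then
        // average two consecutive ones so the table behaves as if the input were rounded.
        let float_x = f32::from_bits((x as u32) << (31 - LUT_BITS)) + initial_acc_gradient;
        let float_x_plus_one =
            f32::from_bits(((x as u32) + 1) << (31 - LUT_BITS)) + initial_acc_gradient;
        let midpoint = 0.5 * (float_x + float_x_plus_one);
        // Adagrad-style factor; the few entries whose bits decode to inf/NaN are never hit
        // by finite gradient magnitudes.
        lut[x] = midpoint.powf(-power_t);
    }
    lut
}

fn lookup(lut: &[f32], accumulated_gradient: f32) -> f32 {
    // A non-negative float has a zero sign bit, so the shift keeps only the top LUT_BITS bits.
    lut[(accumulated_gradient.to_bits() >> (31 - LUT_BITS)) as usize]
}
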
2 changes: 2 additions & 0 deletions src/parser.rs
@@ -1,3 +1,5 @@
#![allow(dead_code,unused_imports)]

use crate::radix_tree::{NamespaceDescriptorWithHash, RadixTree};
use crate::vwmap;
use fasthash::murmur3;
2 changes: 2 additions & 0 deletions src/persistence.rs
@@ -1,3 +1,5 @@
#![allow(dead_code,unused_imports)]

use std::error::Error;
use std::str;

2 changes: 2 additions & 0 deletions src/radix_tree.rs
@@ -1,3 +1,5 @@
#![allow(dead_code,unused_imports)]

use crate::vwmap::NamespaceDescriptor;

#[derive(Clone, Copy, Debug, PartialEq)]
2 changes: 2 additions & 0 deletions src/regressor.rs
@@ -1,3 +1,5 @@
#![allow(dead_code,unused_imports)]

use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
use rustc_hash::FxHashSet;
use std::any::Any;
2 changes: 2 additions & 0 deletions src/serving.rs
@@ -1,3 +1,5 @@
#![allow(dead_code,unused_imports)]

use daemonize::Daemonize;
use std::error::Error;
use std::io;
2 changes: 2 additions & 0 deletions src/vwmap.rs
@@ -1,3 +1,5 @@
#![allow(dead_code,unused_imports)]

use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::error::Error;
