Commit 9c86167

Merge pull request #158 from paritytech/na-kill-try
use `?` instead of `try!`
2 parents 88b38e2 + 4e08afe

File tree: 11 files changed, +56 −58 lines changed
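For context: on a `Result`, `try!(expr)` and `expr?` are equivalent; both unwrap the `Ok` value or early-return the error after converting it with `From::from`. The source hunks below are therefore almost entirely mechanical swaps, while the `Cargo.toml` hunks just bump the crate versions to 9.0.1. A minimal sketch of the equivalence (`parse_pair` is a hypothetical helper, not from this crate; `try!` is deprecated and only accepted on the 2015 edition, so the old form appears as a comment):

```rust
use std::num::ParseIntError;

// Both `try!(expr)` and `expr?` expand (roughly) to:
//
//     match expr {
//         Ok(val) => val,
//         Err(err) => return Err(From::from(err)),
//     }

fn parse_pair(a: &str, b: &str) -> Result<(u32, u32), ParseIntError> {
    // Old style, pre-Rust-1.13 (deprecated, 2015 edition only):
    //     let x = try!(a.parse::<u32>());
    let x = a.parse::<u32>()?; // early-returns the Err on failure
    let y = b.parse::<u32>()?;
    Ok((x, y))
}

fn main() {
    assert_eq!(parse_pair("1", "2"), Ok((1, 2)));
    assert!(parse_pair("1", "oops").is_err());
}
```

Unlike the macro, `?` also composes mid-expression, which is what lets lines like `let len_offset = (try!(as_u32(offset_slice)) / 32) as usize;` collapse into `(as_u32(offset_slice)? / 32)` in `decoder.rs` below.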

Cargo.lock

Lines changed: 7 additions & 7 deletions
Some generated files are not rendered by default.

cli/Cargo.toml

Lines changed: 2 additions & 2 deletions
@@ -1,6 +1,6 @@
 [package]
 name = "ethabi-cli"
-version = "9.0.0"
+version = "9.0.1"
 authors = ["Parity Technologies <[email protected]>"]
 keywords = ["ethereum", "eth", "abi", "solidity", "cli"]
 description = "Easy to use cli for conversion of ethereum contract calls to bytecode."
@@ -12,7 +12,7 @@ rustc-hex = "2.0"
 serde = "1.0"
 serde_derive = "1.0"
 docopt = "1.0"
-ethabi = { version = "9.0.0", path = "../ethabi" }
+ethabi = { version = "9.0.1", path = "../ethabi" }
 error-chain = { version = "0.12.1", default-features = false }
 tiny-keccak = "1.0"

derive/Cargo.toml

Lines changed: 2 additions & 2 deletions
@@ -1,6 +1,6 @@
 [package]
 name = "ethabi-derive"
-version = "9.0.0"
+version = "9.0.1"
 authors = ["Parity Technologies <[email protected]>"]
 homepage = "https://github.com/paritytech/ethabi"
 license = "MIT/Apache-2.0"
@@ -11,7 +11,7 @@ description = "Easy to use conversion of ethereum contract calls to bytecode."
 proc-macro = true
 
 [dependencies]
-ethabi = { path = "../ethabi", version = "9.0.0" }
+ethabi = { path = "../ethabi", version = "9.0.1" }
 heck = "0.3"
 syn = "0.15"
 quote = "0.6"

ethabi/Cargo.toml

Lines changed: 1 addition & 1 deletion
@@ -1,6 +1,6 @@
 [package]
 name = "ethabi"
-version = "9.0.0"
+version = "9.0.1"
 authors = ["Parity Technologies <[email protected]>"]
 homepage = "https://github.com/paritytech/ethabi"
 license = "MIT/Apache-2.0"

ethabi/src/decoder.rs

Lines changed: 23 additions & 23 deletions
@@ -60,7 +60,7 @@ fn take_bytes(slices: &[Word], position: usize, len: usize) -> Result<BytesTaken
 
 	let mut bytes_slices = Vec::with_capacity(slices_len);
 	for i in 0..slices_len {
-		let slice = try!(peek(slices, position + i));
+		let slice = peek(slices, position + i)?;
 		bytes_slices.push(slice);
 	}
 
@@ -80,7 +80,7 @@ fn take_bytes(slices: &[Word], position: usize, len: usize) -> Result<BytesTaken
 fn decode_param(param: &ParamType, slices: &[Word], offset: usize) -> Result<DecodeResult, Error> {
 	match *param {
 		ParamType::Address => {
-			let slice = try!(peek(slices, offset));
+			let slice = peek(slices, offset)?;
 			let mut address = [0u8; 20];
 			address.copy_from_slice(&slice[12..]);
 
@@ -92,7 +92,7 @@ fn decode_param(param: &ParamType, slices: &[Word], offset: usize) -> Result<Dec
 			Ok(result)
 		},
 		ParamType::Int(_) => {
-			let slice = try!(peek(slices, offset));
+			let slice = peek(slices, offset)?;
 
 			let result = DecodeResult {
 				token: Token::Int(slice.clone().into()),
@@ -102,7 +102,7 @@ fn decode_param(param: &ParamType, slices: &[Word], offset: usize) -> Result<Dec
 			Ok(result)
 		},
 		ParamType::Uint(_) => {
-			let slice = try!(peek(slices, offset));
+			let slice = peek(slices, offset)?;
 
 			let result = DecodeResult {
 				token: Token::Uint(slice.clone().into()),
@@ -112,9 +112,9 @@ fn decode_param(param: &ParamType, slices: &[Word], offset: usize) -> Result<Dec
 			Ok(result)
 		},
 		ParamType::Bool => {
-			let slice = try!(peek(slices, offset));
+			let slice = peek(slices, offset)?;
 
-			let b = try!(as_bool(slice));
+			let b = as_bool(slice)?;
 
 			let result = DecodeResult {
 				token: Token::Bool(b),
@@ -124,7 +124,7 @@ fn decode_param(param: &ParamType, slices: &[Word], offset: usize) -> Result<Dec
 			Ok(result)
 		},
 		ParamType::FixedBytes(len) => {
-			let taken = try!(take_bytes(slices, offset, len));
+			let taken = take_bytes(slices, offset, len)?;
 
 			let result = DecodeResult {
 				token: Token::FixedBytes(taken.bytes),
@@ -134,13 +134,13 @@ fn decode_param(param: &ParamType, slices: &[Word], offset: usize) -> Result<Dec
 			Ok(result)
 		},
 		ParamType::Bytes => {
-			let offset_slice = try!(peek(slices, offset));
-			let len_offset = (try!(as_u32(offset_slice)) / 32) as usize;
+			let offset_slice = peek(slices, offset)?;
+			let len_offset = (as_u32(offset_slice)? / 32) as usize;
 
-			let len_slice = try!(peek(slices, len_offset));
-			let len = try!(as_u32(len_slice)) as usize;
+			let len_slice = peek(slices, len_offset)?;
+			let len = as_u32(len_slice)? as usize;
 
-			let taken = try!(take_bytes(slices, len_offset + 1, len));
+			let taken = take_bytes(slices, len_offset + 1, len)?;
 
 			let result = DecodeResult {
 				token: Token::Bytes(taken.bytes),
@@ -150,33 +150,33 @@ fn decode_param(param: &ParamType, slices: &[Word], offset: usize) -> Result<Dec
 			Ok(result)
 		},
 		ParamType::String => {
-			let offset_slice = try!(peek(slices, offset));
-			let len_offset = (try!(as_u32(offset_slice)) / 32) as usize;
+			let offset_slice = peek(slices, offset)?;
+			let len_offset = (as_u32(offset_slice)? / 32) as usize;
 
-			let len_slice = try!(peek(slices, len_offset));
-			let len = try!(as_u32(len_slice)) as usize;
+			let len_slice = peek(slices, len_offset)?;
+			let len = as_u32(len_slice)? as usize;
 
-			let taken = try!(take_bytes(slices, len_offset + 1, len));
+			let taken = take_bytes(slices, len_offset + 1, len)?;
 
 			let result = DecodeResult {
-				token: Token::String(try!(String::from_utf8(taken.bytes))),
+				token: Token::String(String::from_utf8(taken.bytes)?),
 				new_offset: offset + 1,
 			};
 
 			Ok(result)
 		},
 		ParamType::Array(ref t) => {
-			let offset_slice = try!(peek(slices, offset));
-			let len_offset = (try!(as_u32(offset_slice)) / 32) as usize;
+			let offset_slice = peek(slices, offset)?;
+			let len_offset = (as_u32(offset_slice)? / 32) as usize;
 
-			let len_slice = try!(peek(slices, len_offset));
-			let len = try!(as_u32(len_slice)) as usize;
+			let len_slice = peek(slices, len_offset)?;
+			let len = as_u32(len_slice)? as usize;
 
 			let sub_slices = &slices[len_offset + 1..];
 			let mut tokens = Vec::with_capacity(len);
 			let mut new_offset = 0;
 			for _ in 0..len {
-				let res = try!(decode_param(t, &sub_slices, new_offset));
+				let res = decode_param(t, &sub_slices, new_offset)?;
 				new_offset = res.new_offset;
 				tokens.push(res.token);
 			}
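One subtlety in the `ParamType::String` arm above: `String::from_utf8` fails with a `FromUtf8Error`, not this crate's `Error`, and the swap still compiles because `?`, exactly like `try!`, passes the failure through `From::from` before returning it. A self-contained sketch of that conversion requirement, with a hypothetical `MyError` standing in for ethabi's own error type (assumed here to have an equivalent `From` impl, which the compiling diff implies):

```rust
use std::string::FromUtf8Error;

// Stand-in for ethabi's `Error` type (hypothetical).
#[derive(Debug)]
enum MyError {
    Utf8(FromUtf8Error),
}

// The impl that `?` relies on; without it, `?` would not compile here.
impl From<FromUtf8Error> for MyError {
    fn from(e: FromUtf8Error) -> Self {
        MyError::Utf8(e)
    }
}

fn decode_string(bytes: Vec<u8>) -> Result<String, MyError> {
    // Was: Ok(try!(String::from_utf8(bytes)))
    // On Err, this returns Err(MyError::from(utf8_error)).
    Ok(String::from_utf8(bytes)?)
}

fn main() {
    assert_eq!(decode_string(b"abi".to_vec()).unwrap(), "abi");
    assert!(decode_string(vec![0xff, 0xfe]).is_err()); // 0xff is never valid UTF-8
}
```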

ethabi/src/event.rs

Lines changed: 2 additions & 2 deletions
@@ -146,7 +146,7 @@ impl Event {
 			.flat_map(|t| t.as_ref().to_vec())
 			.collect::<Vec<u8>>();
 
-		let topic_tokens = try!(decode(&topic_types, &flat_topics));
+		let topic_tokens = decode(&topic_types, &flat_topics)?;
 
 		// topic may be only a 32 bytes encoded token
 		if topic_tokens.len() != topics_len - to_skip {
@@ -161,7 +161,7 @@
 			.map(|p| p.kind.clone())
 			.collect::<Vec<ParamType>>();
 
-		let data_tokens = try!(decode(&data_types, &data));
+		let data_tokens = decode(&data_types, &data)?;
 
 		let data_named_tokens = data_params.into_iter()
 			.map(|p| p.name)

ethabi/src/operation.rs

Lines changed: 3 additions & 4 deletions
@@ -21,10 +21,9 @@ pub enum Operation {
 
 impl<'a> Deserialize<'a> for Operation {
 	fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: Deserializer<'a> {
-		let v: Value = try!(Deserialize::deserialize(deserializer));
-		let cloned = v.clone();
-		let map = try!(cloned.as_object().ok_or_else(|| SerdeError::custom("Invalid operation")));
-		let s = try!(map.get("type").and_then(Value::as_str).ok_or_else(|| SerdeError::custom("Invalid operation type")));
+		let v: Value = Deserialize::deserialize(deserializer)?;
+		let map = v.as_object().ok_or_else(|| SerdeError::custom("Invalid operation"))?;
+		let s = map.get("type").and_then(Value::as_str).ok_or_else(|| SerdeError::custom("Invalid operation type"))?;
 
 		// This is a workaround to support non-spec compliant function and event names,
 		// see: https://github.com/paritytech/parity/issues/4122
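Aside from the version bumps, this is the only hunk shown that does more than swap `try!` for `?`: it also drops the intermediate `let cloned = v.clone();`, since `v.as_object()` only needs a borrow of the deserialized `Value` and the clone was never required.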

ethabi/src/param_type/reader.rs

Lines changed: 6 additions & 6 deletions
@@ -21,12 +21,12 @@ impl Reader {
 			let count = name.chars().count();
 			if num.is_empty() {
 				// we already know it's a dynamic array!
-				let subtype = try!(Reader::read(&name[..count - 2]));
+				let subtype = Reader::read(&name[..count - 2])?;
 				return Ok(ParamType::Array(Box::new(subtype)));
 			} else {
 				// it's a fixed array.
-				let len = try!(usize::from_str_radix(&num, 10));
-				let subtype = try!(Reader::read(&name[..count - num.len() - 2]));
+				let len = usize::from_str_radix(&num, 10)?;
+				let subtype = Reader::read(&name[..count - num.len() - 2])?;
 				return Ok(ParamType::FixedArray(Box::new(subtype), len));
 			}
 		}
@@ -39,15 +39,15 @@
 			"int" => ParamType::Int(256),
 			"uint" => ParamType::Uint(256),
 			s if s.starts_with("int") => {
-				let len = try!(usize::from_str_radix(&s[3..], 10));
+				let len = usize::from_str_radix(&s[3..], 10)?;
 				ParamType::Int(len)
 			},
 			s if s.starts_with("uint") => {
-				let len = try!(usize::from_str_radix(&s[4..], 10));
+				let len = usize::from_str_radix(&s[4..], 10)?;
 				ParamType::Uint(len)
 			},
 			s if s.starts_with("bytes") => {
-				let len = try!(usize::from_str_radix(&s[5..], 10));
+				let len = usize::from_str_radix(&s[5..], 10)?;
 				ParamType::FixedBytes(len)
 			},
 			_ => {

ethabi/src/token/lenient.rs

Lines changed: 2 additions & 2 deletions
@@ -32,7 +32,7 @@ impl Tokenizer for LenientTokenizer {
 			return result;
 		}
 
-		let uint = try!(u32::from_str_radix(value, 10));
+		let uint = u32::from_str_radix(value, 10)?;
 		Ok(pad_u32(uint))
 	}
 
@@ -42,7 +42,7 @@
 			return result;
 		}
 
-		let int = try!(i32::from_str_radix(value, 10));
+		let int = i32::from_str_radix(value, 10)?;
 		Ok(pad_i32(int))
 	}
 }

ethabi/src/token/mod.rs

Lines changed: 3 additions & 3 deletions
@@ -28,7 +28,7 @@ pub trait Tokenizer {
 
 	/// Tries to parse a value as a vector of tokens of fixed size.
 	fn tokenize_fixed_array(value: &str, param: &ParamType, len: usize) -> Result<Vec<Token>, Error> {
-		let result = try!(Self::tokenize_array(value, param));
+		let result = Self::tokenize_array(value, param)?;
 		match result.len() == len {
 			true => Ok(result),
 			false => Err(ErrorKind::InvalidData.into()),
@@ -60,7 +60,7 @@
 						return Err(ErrorKind::InvalidData.into());
 					} else if nested == 0 {
 						let sub = &value[last_item..i];
-						let token = try!(Self::tokenize(param, sub));
+						let token = Self::tokenize(param, sub)?;
 						result.push(token);
 						last_item = i + 1;
 					}
@@ -70,7 +70,7 @@
 				},
 				',' if nested == 1 && ignore == false => {
 					let sub = &value[last_item..i];
-					let token = try!(Self::tokenize(param, sub));
+					let token = Self::tokenize(param, sub)?;
 					result.push(token);
 					last_item = i + 1;
 				},
