rust-meval-update-dependencies.patch 73 KB

  1. From b460b58f96310693a06cd180b45c3d5d60c778de Mon Sep 17 00:00:00 2001
  2. From: Simon Gardling <titaniumtown@gmail.com>
  3. Date: Wed, 16 Feb 2022 10:08:55 -0500
  4. Subject: [PATCH 01/10] update all dependencies (except for nom)
  5. Upstream: <https://github.com/rekka/meval-rs/pull/27>.
  6. ---
  7. Cargo.toml | 8 ++++----
  8. 1 file changed, 4 insertions(+), 4 deletions(-)
  9. diff --git a/Cargo.toml b/Cargo.toml
  10. index 1b8a828..c395833 100644
  11. --- a/Cargo.toml
  12. +++ b/Cargo.toml
  13. @@ -14,14 +14,14 @@ exclude = ["README.tpl", ".travis.yml"]
  14. [dependencies]
  15. fnv = "1.0.5"
  16. nom = "1.0.0"
  17. -serde = { version = "1", optional = true }
  18. +serde = { version = "1.0.136", optional = true }
  19. [dev-dependencies]
  20. -gnuplot = "0.0.23"
  21. -serde_test = "1"
  22. +gnuplot = "0.0.37"
  23. +serde_test = "1.0.136"
  24. serde_derive = "1"
  25. serde_json = "1"
  26. -toml = "0.4.5"
  27. +toml = "0.5.8"
  28. [features]
  29. default = []
  30. From 978dbcf905bd7d051079e4bc197d75ebd8c9c162 Mon Sep 17 00:00:00 2001
  31. From: Ishaan <whthownothing@gmail.com>
  32. Date: Fri, 21 Jan 2022 20:53:17 +0530
  33. Subject: [PATCH 02/10] removed depracation warnings and substituted
  34. ---
  35. src/expr.rs | 26 +++++++++++++-------------
  36. src/lib.rs | 18 ++++--------------
  37. src/tokenizer.rs | 11 +++++------
  38. 3 files changed, 22 insertions(+), 33 deletions(-)
  39. diff --git a/src/expr.rs b/src/expr.rs
  40. index 23d56ce..f6b303c 100644
  41. --- a/src/expr.rs
  42. +++ b/src/expr.rs
  43. @@ -156,7 +156,7 @@ impl Expr {
  44. where
  45. C: ContextProvider + 'a,
  46. {
  47. - try!(self.check_context(((var, 0.), &ctx)));
  48. + self.check_context(((var, 0.), &ctx))?;
  49. let var = var.to_owned();
  50. Ok(move |x| {
  51. self.eval_with_context(((&var, x), &ctx))
  52. @@ -194,7 +194,7 @@ impl Expr {
  53. where
  54. C: ContextProvider + 'a,
  55. {
  56. - try!(self.check_context(([(var1, 0.), (var2, 0.)], &ctx)));
  57. + self.check_context(([(var1, 0.), (var2, 0.)], &ctx))?;
  58. let var1 = var1.to_owned();
  59. let var2 = var2.to_owned();
  60. Ok(move |x, y| {
  61. @@ -239,7 +239,7 @@ impl Expr {
  62. where
  63. C: ContextProvider + 'a,
  64. {
  65. - try!(self.check_context(([(var1, 0.), (var2, 0.), (var3, 0.)], &ctx)));
  66. + self.check_context(([(var1, 0.), (var2, 0.), (var3, 0.)], &ctx))?;
  67. let var1 = var1.to_owned();
  68. let var2 = var2.to_owned();
  69. let var3 = var3.to_owned();
  70. @@ -287,7 +287,7 @@ impl Expr {
  71. where
  72. C: ContextProvider + 'a,
  73. {
  74. - try!(self.check_context(([(var1, 0.), (var2, 0.), (var3, 0.), (var4, 0.)], &ctx)));
  75. + self.check_context(([(var1, 0.), (var2, 0.), (var3, 0.), (var4, 0.)], &ctx))?;
  76. let var1 = var1.to_owned();
  77. let var2 = var2.to_owned();
  78. let var3 = var3.to_owned();
  79. @@ -338,10 +338,10 @@ impl Expr {
  80. where
  81. C: ContextProvider + 'a,
  82. {
  83. - try!(self.check_context((
  84. + self.check_context((
  85. [(var1, 0.), (var2, 0.), (var3, 0.), (var4, 0.), (var5, 0.)],
  86. &ctx
  87. - )));
  88. + ))?;
  89. let var1 = var1.to_owned();
  90. let var2 = var2.to_owned();
  91. let var3 = var3.to_owned();
  92. @@ -389,12 +389,12 @@ impl Expr {
  93. C: ContextProvider + 'a,
  94. {
  95. let n = vars.len();
  96. - try!(self.check_context((
  97. + self.check_context((
  98. vars.into_iter()
  99. .zip(vec![0.; n].into_iter())
  100. .collect::<Vec<_>>(),
  101. &ctx
  102. - )));
  103. + ))?;
  104. let vars = vars.iter().map(|v| v.to_owned()).collect::<Vec<_>>();
  105. Ok(move |x: &[f64]| {
  106. self.eval_with_context((
  107. @@ -447,7 +447,7 @@ impl Expr {
  108. /// Evaluates a string with built-in constants and functions.
  109. pub fn eval_str<S: AsRef<str>>(expr: S) -> Result<f64, Error> {
  110. - let expr = try!(Expr::from_str(expr.as_ref()));
  111. + let expr = Expr::from_str(expr.as_ref())?;
  112. expr.eval_with_context(builtin())
  113. }
  114. @@ -456,9 +456,9 @@ impl FromStr for Expr {
  115. type Err = Error;
  116. /// Constructs an expression by parsing a string.
  117. fn from_str(s: &str) -> Result<Self, Self::Err> {
  118. - let tokens = try!(tokenize(s));
  119. + let tokens = tokenize(s)?;
  120. - let rpn = try!(to_rpn(&tokens));
  121. + let rpn = to_rpn(&tokens)?;
  122. Ok(Expr { rpn: rpn })
  123. }
  124. @@ -471,7 +471,7 @@ pub fn eval_str_with_context<S: AsRef<str>, C: ContextProvider>(
  125. expr: S,
  126. ctx: C,
  127. ) -> Result<f64, Error> {
  128. - let expr = try!(Expr::from_str(expr.as_ref()));
  129. + let expr = Expr::from_str(expr.as_ref())?;
  130. expr.eval_with_context(ctx)
  131. }
  132. @@ -856,7 +856,7 @@ impl<'a> Default for Context<'a> {
  133. }
  134. }
  135. -type GuardedFunc<'a> = Rc<Fn(&[f64]) -> Result<f64, FuncEvalError> + 'a>;
  136. +type GuardedFunc<'a> = Rc<dyn Fn(&[f64]) -> Result<f64, FuncEvalError> + 'a>;
  137. /// Trait for types that can specify the number of required arguments for a function with a
  138. /// variable number of arguments.
  139. diff --git a/src/lib.rs b/src/lib.rs
  140. index 2aea3b7..0c8d20d 100644
  141. --- a/src/lib.rs
  142. +++ b/src/lib.rs
  143. @@ -233,15 +233,15 @@ impl fmt::Display for Error {
  144. write!(f, "Evaluation error: function `{}`: {}", name, e)
  145. }
  146. Error::ParseError(ref e) => {
  147. - try!(write!(f, "Parse error: "));
  148. + write!(f, "Parse error: ")?;
  149. e.fmt(f)
  150. }
  151. Error::RPNError(ref e) => {
  152. - try!(write!(f, "RPN error: "));
  153. + write!(f, "RPN error: ")?;
  154. e.fmt(f)
  155. }
  156. Error::EvalError(ref e) => {
  157. - try!(write!(f, "Eval error: "));
  158. + write!(f, "Eval error: ")?;
  159. e.fmt(f)
  160. }
  161. }
  162. @@ -261,17 +261,7 @@ impl From<RPNError> for Error {
  163. }
  164. impl std::error::Error for Error {
  165. - fn description(&self) -> &str {
  166. - match *self {
  167. - Error::UnknownVariable(_) => "unknown variable",
  168. - Error::Function(_, _) => "function evaluation error",
  169. - Error::EvalError(_) => "eval error",
  170. - Error::ParseError(ref e) => e.description(),
  171. - Error::RPNError(ref e) => e.description(),
  172. - }
  173. - }
  174. -
  175. - fn cause(&self) -> Option<&std::error::Error> {
  176. + fn cause(&self) -> Option<&dyn std::error::Error> {
  177. match *self {
  178. Error::ParseError(ref e) => Some(e),
  179. Error::RPNError(ref e) => Some(e),
  180. diff --git a/src/tokenizer.rs b/src/tokenizer.rs
  181. index d4692e3..f63fcf3 100644
  182. --- a/src/tokenizer.rs
  183. +++ b/src/tokenizer.rs
  184. @@ -5,8 +5,7 @@
  185. //! The parser should tokenize only well-formed expressions.
  186. //!
  187. //! [nom]: https://crates.io/crates/nom
  188. -use nom::{multispace, slice_to_offsets, IResult, Needed};
  189. -use std;
  190. +use nom::{Needed, multispace, slice_to_offsets, IResult};
  191. use std::fmt;
  192. use std::str::from_utf8;
  193. @@ -117,16 +116,16 @@ named!(comma<Token>, chain!(tag!(","), || Token::Comma));
  194. fn ident(input: &[u8]) -> IResult<&[u8], &[u8]> {
  195. use nom::Err::*;
  196. use nom::IResult::*;
  197. - use nom::{ErrorKind, Needed};
  198. + use nom::ErrorKind;
  199. // first character must be 'a'...'z' | 'A'...'Z' | '_'
  200. match input.first().cloned() {
  201. - Some(b'a'...b'z') | Some(b'A'...b'Z') | Some(b'_') => {
  202. + Some(b'a'..=b'z') | Some(b'A'..=b'Z') | Some(b'_') => {
  203. let n = input
  204. .iter()
  205. .skip(1)
  206. .take_while(|&&c| match c {
  207. - b'a'...b'z' | b'A'...b'Z' | b'_' | b'0'...b'9' => true,
  208. + b'a'..=b'z' | b'A'..=b'Z' | b'_' | b'0'..=b'9' => true,
  209. _ => false,
  210. })
  211. .count();
  212. @@ -145,7 +144,7 @@ named!(
  213. ))
  214. );
  215. -/// Parse `func(`, returns `func`.
  216. +// Parse `func(`, returns `func`.
  217. named!(
  218. func<Token>,
  219. map!(
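The substance of patch 02 above is mechanical: every use of the long-deprecated try! macro becomes the ? operator, bare trait objects gain the dyn keyword, and the deprecated Error::description implementation is dropped. As a minimal, self-contained sketch of the try!-to-? change (a hypothetical function, not meval code), the two forms behave identically:

    // Old style: the deprecated try! macro (also a keyword clash in the 2018 edition).
    // let a = try!(s.parse::<i32>());

    // New style, as applied throughout src/expr.rs and src/lib.rs:
    fn parse_twice(s: &str) -> Result<i32, std::num::ParseIntError> {
        let a = s.parse::<i32>()?; // on Err, `?` returns the error to the caller
        Ok(a * 2)
    }

    fn main() {
        assert_eq!(parse_twice("21"), Ok(42));
        assert!(parse_twice("oops").is_err());
    }
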
  220. From 9cd8b3dd90acfa6281e0c326719de6cbf532fa7f Mon Sep 17 00:00:00 2001
  221. From: Simon Gardling <titaniumtown@gmail.com>
  222. Date: Wed, 16 Feb 2022 10:21:33 -0500
  223. Subject: [PATCH 03/10] bump even more dependencies
  224. ---
  225. Cargo.toml | 6 +++---
  226. 1 file changed, 3 insertions(+), 3 deletions(-)
  227. diff --git a/Cargo.toml b/Cargo.toml
  228. index c395833..2cc0eb4 100644
  229. --- a/Cargo.toml
  230. +++ b/Cargo.toml
  231. @@ -12,15 +12,15 @@ version = "0.2.0"
  232. exclude = ["README.tpl", ".travis.yml"]
  233. [dependencies]
  234. -fnv = "1.0.5"
  235. +fnv = "1.0.7"
  236. nom = "1.0.0"
  237. serde = { version = "1.0.136", optional = true }
  238. [dev-dependencies]
  239. gnuplot = "0.0.37"
  240. serde_test = "1.0.136"
  241. -serde_derive = "1"
  242. -serde_json = "1"
  243. +serde_derive = "1.0.136"
  244. +serde_json = "1.0.79"
  245. toml = "0.5.8"
  246. [features]
  247. From 595ea70b5901d48bd119e9d98aa1e84849ec1945 Mon Sep 17 00:00:00 2001
  248. From: Simon Gardling <titaniumtown@gmail.com>
  249. Date: Wed, 16 Feb 2022 10:34:18 -0500
  250. Subject: [PATCH 04/10] nom 7.1.0
  251. based on work from: https://github.com/rekka/meval-rs/pull/22
  252. ---
  253. Cargo.toml | 2 +-
  254. src/lib.rs | 8 +-
  255. src/shunting_yard.rs | 1 +
  256. src/tokenizer.rs | 749 ++++++++++++++++++++++++++++---------------
  257. 4 files changed, 496 insertions(+), 264 deletions(-)
  258. diff --git a/Cargo.toml b/Cargo.toml
  259. index 2cc0eb4..60b4064 100644
  260. --- a/Cargo.toml
  261. +++ b/Cargo.toml
  262. @@ -13,7 +13,7 @@ exclude = ["README.tpl", ".travis.yml"]
  263. [dependencies]
  264. fnv = "1.0.7"
  265. -nom = "1.0.0"
  266. +nom = "7.1.0"
  267. serde = { version = "1.0.136", optional = true }
  268. [dev-dependencies]
  269. diff --git a/src/lib.rs b/src/lib.rs
  270. index 0c8d20d..69261b1 100644
  271. --- a/src/lib.rs
  272. +++ b/src/lib.rs
  273. @@ -208,7 +208,7 @@ pub mod de;
  274. pub use expr::*;
  275. pub use shunting_yard::RPNError;
  276. -pub use tokenizer::ParseError;
  277. +pub use tokenizer::TokenParseError;
  278. /// An error produced during parsing or evaluation.
  279. #[derive(Debug, Clone, PartialEq)]
  280. @@ -216,7 +216,7 @@ pub enum Error {
  281. UnknownVariable(String),
  282. Function(String, FuncEvalError),
  283. /// An error returned by the parser.
  284. - ParseError(ParseError),
  285. + ParseError(TokenParseError),
  286. /// The shunting-yard algorithm returned an error.
  287. RPNError(RPNError),
  288. // A catch all for all other errors during evaluation
  289. @@ -248,8 +248,8 @@ impl fmt::Display for Error {
  290. }
  291. }
  292. -impl From<ParseError> for Error {
  293. - fn from(err: ParseError) -> Error {
  294. +impl From<TokenParseError> for Error {
  295. + fn from(err: TokenParseError) -> Error {
  296. Error::ParseError(err)
  297. }
  298. }
  299. diff --git a/src/shunting_yard.rs b/src/shunting_yard.rs
  300. index 5fe3010..90026bf 100644
  301. --- a/src/shunting_yard.rs
  302. +++ b/src/shunting_yard.rs
  303. @@ -1,3 +1,4 @@
  304. +
  305. //! Implementation of the shunting-yard algorithm for converting an infix expression to an
  306. //! expression in reverse Polish notation (RPN).
  307. //!
  308. diff --git a/src/tokenizer.rs b/src/tokenizer.rs
  309. index f63fcf3..0e08337 100644
  310. --- a/src/tokenizer.rs
  311. +++ b/src/tokenizer.rs
  312. @@ -5,44 +5,66 @@
  313. //! The parser should tokenize only well-formed expressions.
  314. //!
  315. //! [nom]: https://crates.io/crates/nom
  316. -use nom::{Needed, multispace, slice_to_offsets, IResult};
  317. +
  318. +use nom::{
  319. + branch::alt,
  320. + bytes::complete::is_a,
  321. + bytes::complete::{escaped, take, tag, take_while},
  322. + character::complete::{anychar, digit1, multispace0, alphanumeric1, alphanumeric0, char, alpha1, one_of},
  323. + combinator::{complete, peek, all_consuming, recognize, map, opt, cut, not},
  324. + error::{context, convert_error, ErrorKind, ParseError, VerboseError},
  325. + multi::separated_list0,
  326. + number::complete::double,
  327. + sequence::{tuple, pair, delimited, preceded, separated_pair, terminated},
  328. + Err, Needed, IResult
  329. +};
  330. +
  331. use std::fmt;
  332. use std::str::from_utf8;
  333. +use std::f64;
  334. /// An error reported by the parser.
  335. #[derive(Debug, Clone, PartialEq)]
  336. -pub enum ParseError {
  337. +pub enum TokenParseError {
  338. /// A token that is not allowed at the given location (contains the location of the offending
  339. /// character in the source string).
  340. UnexpectedToken(usize),
  341. +
  342. + UnexpectedStrToken(String),
  343. /// Missing right parentheses at the end of the source string (contains the number of missing
  344. /// parens).
  345. MissingRParen(i32),
  346. /// Missing operator or function argument at the end of the expression.
  347. MissingArgument,
  348. +
  349. + UnknownError
  350. }
  351. -impl fmt::Display for ParseError {
  352. +impl fmt::Display for TokenParseError {
  353. fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
  354. - match *self {
  355. - ParseError::UnexpectedToken(i) => write!(f, "Unexpected token at byte {}.", i),
  356. - ParseError::MissingRParen(i) => write!(
  357. + match &*self {
  358. + TokenParseError::UnexpectedToken(i) => write!(f, "Unexpected token at byte {}.", i),
  359. + TokenParseError::UnexpectedStrToken(s) => write!(f, "Unexpected token {}.", s),
  360. + TokenParseError::MissingRParen(i) => write!(
  361. f,
  362. "Missing {} right parenthes{}.",
  363. i,
  364. - if i == 1 { "is" } else { "es" }
  365. + if *i == 1 { "is" } else { "es" }
  366. ),
  367. - ParseError::MissingArgument => write!(f, "Missing argument at the end of expression."),
  368. + TokenParseError::MissingArgument => write!(f, "Missing argument at the end of expression."),
  369. + TokenParseError::UnknownError => write!(f, "Unknown pass error."),
  370. }
  371. }
  372. }
  373. -impl std::error::Error for ParseError {
  374. +impl std::error::Error for TokenParseError {
  375. fn description(&self) -> &str {
  376. match *self {
  377. - ParseError::UnexpectedToken(_) => "unexpected token",
  378. - ParseError::MissingRParen(_) => "missing right parenthesis",
  379. - ParseError::MissingArgument => "missing argument",
  380. + TokenParseError::UnexpectedToken(_) => "unexpected token",
  381. + TokenParseError::UnexpectedStrToken(_) => "Unexpected token",
  382. + TokenParseError::MissingRParen(_) => "missing right parenthesis",
  383. + TokenParseError::MissingArgument => "missing argument",
  384. + TokenParseError::UnknownError => "unknown error",
  385. }
  386. }
  387. }
  388. @@ -73,6 +95,8 @@ pub enum Token {
  389. RParen,
  390. /// Comma: function argument separator
  391. Comma,
  392. + /// Decimal Point
  393. + //DecimalPoint,
  394. /// A number.
  395. Number(f64),
  396. @@ -82,179 +106,150 @@ pub enum Token {
  397. Func(String, Option<usize>),
  398. }
  399. -named!(
  400. - binop<Token>,
  401. - alt!(
  402. - chain!(tag!("+"), || Token::Binary(Operation::Plus))
  403. - | chain!(tag!("-"), || Token::Binary(Operation::Minus))
  404. - | chain!(tag!("*"), || Token::Binary(Operation::Times))
  405. - | chain!(tag!("/"), || Token::Binary(Operation::Div))
  406. - | chain!(tag!("%"), || Token::Binary(Operation::Rem))
  407. - | chain!(tag!("^"), || Token::Binary(Operation::Pow))
  408. - )
  409. -);
  410. -
  411. -named!(
  412. - negpos<Token>,
  413. - alt!(
  414. - chain!(tag!("+"), || Token::Unary(Operation::Plus))
  415. - | chain!(tag!("-"), || Token::Unary(Operation::Minus))
  416. - )
  417. -);
  418. -
  419. -named!(
  420. - fact<Token>,
  421. - chain!(tag!("!"), || Token::Unary(Operation::Fact))
  422. -);
  423. -named!(lparen<Token>, chain!(tag!("("), || Token::LParen));
  424. -named!(rparen<Token>, chain!(tag!(")"), || Token::RParen));
  425. -named!(comma<Token>, chain!(tag!(","), || Token::Comma));
  426. -
  427. -/// Parse an identifier:
  428. -///
  429. -/// Must start with a letter or an underscore, can be followed by letters, digits or underscores.
  430. -fn ident(input: &[u8]) -> IResult<&[u8], &[u8]> {
  431. - use nom::Err::*;
  432. - use nom::IResult::*;
  433. - use nom::ErrorKind;
  434. -
  435. - // first character must be 'a'...'z' | 'A'...'Z' | '_'
  436. - match input.first().cloned() {
  437. - Some(b'a'..=b'z') | Some(b'A'..=b'Z') | Some(b'_') => {
  438. - let n = input
  439. - .iter()
  440. - .skip(1)
  441. - .take_while(|&&c| match c {
  442. - b'a'..=b'z' | b'A'..=b'Z' | b'_' | b'0'..=b'9' => true,
  443. - _ => false,
  444. - })
  445. - .count();
  446. - let (parsed, rest) = input.split_at(n + 1);
  447. - Done(rest, parsed)
  448. - }
  449. - None => Incomplete(Needed::Size(1)),
  450. - _ => Error(Position(ErrorKind::Custom(0), input)),
  451. - }
  452. +/// Continuing the trend of starting from the simplest piece and building up,
  453. +/// we start by creating a parser for the built-in operator functions.
  454. +fn binop<'a>(i: &'a str) -> IResult<&'a str, Token, (&'a str, ErrorKind)> {
  455. + // one_of matches one of the characters we give it
  456. + let (i, t) = one_of("+-*/%^!")(i)?;
  457. +
  458. + // because we are matching single character tokens, we can do the matching logic
  459. + // on the returned value
  460. + Ok((
  461. + i,
  462. + match t {
  463. + '+' => Token::Binary(Operation::Plus),
  464. + '-' => Token::Binary(Operation::Minus),
  465. + '*' => Token::Binary(Operation::Times),
  466. + '/' => Token::Binary(Operation::Div),
  467. + '%' => Token::Binary(Operation::Rem),
  468. + '^' => Token::Binary(Operation::Pow),
  469. + '!' => Token::Binary(Operation::Fact),
  470. + _ => unreachable!(),
  471. + },
  472. + ))
  473. }
  474. -named!(
  475. - var<Token>,
  476. - map!(map_res!(complete!(ident), from_utf8), |s: &str| Token::Var(
  477. - s.into()
  478. - ))
  479. -);
  480. -
  481. -// Parse `func(`, returns `func`.
  482. -named!(
  483. - func<Token>,
  484. - map!(
  485. - map_res!(
  486. - terminated!(
  487. - complete!(ident),
  488. - preceded!(opt!(multispace), complete!(tag!("(")))
  489. - ),
  490. - from_utf8
  491. - ),
  492. - |s: &str| Token::Func(s.into(), None)
  493. - )
  494. -);
  495. +fn lparen<'a>(i: &'a str) -> IResult<&'a str, Token, (&'a str, ErrorKind)> {
  496. + map(tag("("), |_: &str| Token::LParen)(i)
  497. +}
  498. -/// Matches one or more digit characters `0`...`9`.
  499. -///
  500. -/// Never returns `nom::IResult::Incomplete`.
  501. -///
  502. -/// Fix of IMHO broken `nom::digit`, which parses an empty string successfully.
  503. -fn digit_complete(input: &[u8]) -> IResult<&[u8], &[u8]> {
  504. - use nom::Err::*;
  505. - use nom::IResult::*;
  506. - use nom::{is_digit, ErrorKind};
  507. -
  508. - let n = input.iter().take_while(|&&c| is_digit(c)).count();
  509. - if n > 0 {
  510. - let (parsed, rest) = input.split_at(n);
  511. - Done(rest, parsed)
  512. - } else {
  513. - Error(Position(ErrorKind::Digit, input))
  514. +fn rparen<'a>(i: &'a str) -> IResult<&'a str, Token, (&'a str, ErrorKind)> {
  515. + map(tag(")"), |_: &str| Token::RParen)(i)
  516. +}
  517. +
  518. +fn comma<'a>(i: &'a str) -> IResult<&'a str, Token, (&'a str, ErrorKind)> {
  519. + map(tag(","), |_: &str| Token::Comma)(i)
  520. +}
  521. +
  522. +/// negpos parse. detects either - or +
  523. +fn negpos_s<'a>(i: &'a str) -> IResult<&'a str, &'a str, (&'a str, ErrorKind)> {
  524. +
  525. + match alt((tag("+"), tag("-"), ))(i) {
  526. + Ok((remaining_input, operator)) => Ok((remaining_input, operator)),
  527. + Err(e) => Err(e)
  528. }
  529. }
  530. -named!(
  531. - float<usize>,
  532. - chain!(
  533. - a: digit_complete ~
  534. - b: complete!(chain!(tag!(".") ~ d: digit_complete?,
  535. - ||{1 + d.map(|s| s.len()).unwrap_or(0)}))? ~
  536. - e: complete!(exp),
  537. - ||{a.len() + b.unwrap_or(0) + e.unwrap_or(0)}
  538. - )
  539. -);
  540. -
  541. -/// Parser that matches the exponential part of a float. If the `input[0] == 'e' | 'E'` then at
  542. -/// least one digit must match.
  543. -fn exp(input: &[u8]) -> IResult<&[u8], Option<usize>> {
  544. - use nom::IResult::*;
  545. - match alt!(input, tag!("e") | tag!("E")) {
  546. - Incomplete(_) | Error(_) => Done(input, None),
  547. - Done(i, _) => match chain!(i, s: alt!(tag!("+") | tag!("-"))? ~
  548. - e: digit_complete,
  549. - ||{Some(1 + s.map(|s| s.len()).unwrap_or(0) + e.len())})
  550. - {
  551. - Incomplete(Needed::Size(i)) => Incomplete(Needed::Size(i + 1)),
  552. - o => o,
  553. +/// negpos parse. detects either - or +
  554. +fn negpos<'a>(i: &'a str) -> IResult<&'a str, Token, (&'a str, ErrorKind)> {
  555. +
  556. + match negpos_s(i) {
  557. + Ok((remaining_input, operator)) => {
  558. + match operator.as_ref() {
  559. + "+" => Ok((remaining_input, Token::Unary(Operation::Plus))),
  560. + "-" => Ok((remaining_input, Token::Unary(Operation::Minus))),
  561. + _ => panic!("Should never occur")
  562. + }
  563. },
  564. + Err(e) => Err(e)
  565. }
  566. }
  567. -fn number(input: &[u8]) -> IResult<&[u8], Token> {
  568. - use nom::Err;
  569. - use nom::ErrorKind;
  570. - use nom::IResult::*;
  571. - use std::str::FromStr;
  572. -
  573. - match float(input) {
  574. - Done(rest, l) => {
  575. - // it should be safe to call unwrap here instead of the error checking, since
  576. - // `float` should match only well-formed numbers
  577. - from_utf8(&input[..l])
  578. - .ok()
  579. - .and_then(|s| f64::from_str(s).ok())
  580. - .map_or(Error(Err::Position(ErrorKind::Custom(0), input)), |f| {
  581. - Done(rest, Token::Number(f))
  582. - })
  583. - }
  584. - Error(e) => Error(e),
  585. - Incomplete(n) => Incomplete(n),
  586. - }
  587. +/// factorial parse
  588. +fn fact<'a>(i: &'a str) -> IResult<&'a str, Token, (&'a str, ErrorKind)> {
  589. + map(tag("!"), |s: &str| Token::Unary(Operation::Fact))(i)
  590. }
  591. -named!(
  592. - lexpr<Token>,
  593. - delimited!(
  594. - opt!(multispace),
  595. - alt!(number | func | var | negpos | lparen),
  596. - opt!(multispace)
  597. - )
  598. -);
  599. -named!(
  600. - after_rexpr<Token>,
  601. - delimited!(
  602. - opt!(multispace),
  603. - alt!(fact | binop | rparen),
  604. - opt!(multispace)
  605. - )
  606. -);
  607. -named!(
  608. - after_rexpr_no_paren<Token>,
  609. - delimited!(opt!(multispace), alt!(fact | binop), opt!(multispace))
  610. -);
  611. -named!(
  612. - after_rexpr_comma<Token>,
  613. - delimited!(
  614. - opt!(multispace),
  615. - alt!(fact | binop | rparen | comma),
  616. - opt!(multispace)
  617. - )
  618. -);
  619. +fn ident<'a>(i: &'a str) -> IResult<&'a str, &'a str, (&'a str, ErrorKind)> {
  620. + let REMAINING_CHARS: &str = "_abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789";
  621. + let FIRST_CHARS: &str = "_abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ";
  622. +
  623. + // Returns whole strings matched by the given parser.
  624. + recognize(
  625. + // Runs the first parser, if succeeded then runs second, and returns the second result.
  626. + // Note that returned ok value of `preceded()` is ignored by `recognize()`.
  627. + preceded(
  628. + // Parses a single character contained in the given string.
  629. + one_of(FIRST_CHARS),
  630. + // Parses the longest slice consisting of the given characters
  631. + opt(is_a(REMAINING_CHARS)),
  632. + )
  633. + )(i)
  634. + }
  635. +
  636. +fn var<'a>(i: &'a str) -> IResult<&'a str, Token, (&'a str, ErrorKind)> {
  637. + map(complete(ident), |s: &str| Token::Var(s.into()))(i)
  638. +}
  639. +
  640. +/// Parse `func(`, returns `func`.
  641. +fn func<'a>(i: &'a str) -> IResult<&'a str, Token, (&'a str, ErrorKind)> {
  642. + map(
  643. + //recognize(
  644. + terminated(
  645. + complete(ident),
  646. + preceded(multispace0,
  647. + complete(tag("("))
  648. + )
  649. + )
  650. + //)
  651. + ,
  652. + |s: &str| Token::Func(s.into(), None)
  653. + )(i)
  654. +}
  655. +
  656. +fn number<'a>(i: &'a str) -> IResult<&'a str, Token, (&'a str, ErrorKind)> {
  657. + preceded(
  658. + peek(one_of("0123456789")),
  659. + map(double, |s| Token::Number(s))
  660. + )(i)
  661. +}
  662. +
  663. +fn lexpr<'a>(i: &'a str) -> IResult<&'a str, Token, (&'a str, ErrorKind)> {
  664. +
  665. + delimited(
  666. + multispace0,
  667. + alt((number, func, var, negpos, lparen)),
  668. + multispace0
  669. + )(i)
  670. +}
  671. +
  672. +
  673. +fn after_rexpr<'a>(i: &'a str) -> IResult<&'a str, Token, (&'a str, ErrorKind)> {
  674. +
  675. + delimited(
  676. + multispace0,
  677. + alt((fact, binop, rparen)),
  678. + multispace0
  679. + )(i)
  680. +}
  681. +
  682. +fn after_rexpr_no_paren<'a>(i: &'a str) -> IResult<&'a str, Token, (&'a str, ErrorKind)> {
  683. +
  684. + delimited(
  685. + multispace0,
  686. + alt((fact, binop)),
  687. + multispace0
  688. + )(i)
  689. +}
  690. +
  691. +fn after_rexpr_comma<'a>(i: &'a str) -> IResult<&'a str, Token, (&'a str, ErrorKind)> {
  692. +
  693. + delimited(
  694. + multispace0,
  695. + alt((fact, binop, rparen, comma)),
  696. + multispace0
  697. + )(i)
  698. +}
  699. #[derive(Debug, Clone, Copy)]
  700. enum TokenizerState {
  701. @@ -270,6 +265,7 @@ enum ParenState {
  702. Func,
  703. }
  704. +
  705. /// Tokenize a given mathematical expression.
  706. ///
  707. /// The parser should return `Ok` only if the expression is well-formed.
  708. @@ -277,29 +273,27 @@ enum ParenState {
  709. /// # Failure
  710. ///
  711. /// Returns `Err` if the expression is not well-formed.
  712. -pub fn tokenize<S: AsRef<str>>(input: S) -> Result<Vec<Token>, ParseError> {
  713. - use self::TokenizerState::*;
  714. - use nom::Err;
  715. - use nom::IResult::*;
  716. - let mut state = LExpr;
  717. +pub fn tokenize(input: &str) -> Result<Vec<Token>, TokenParseError> {
  718. + let mut state: TokenizerState = TokenizerState::LExpr;
  719. // number of function arguments left
  720. let mut paren_stack = vec![];
  721. let mut res = vec![];
  722. - let input = input.as_ref().as_bytes();
  723. let mut s = input;
  724. while !s.is_empty() {
  725. +
  726. let r = match (state, paren_stack.last()) {
  727. - (LExpr, _) => lexpr(s),
  728. - (AfterRExpr, None) => after_rexpr_no_paren(s),
  729. - (AfterRExpr, Some(&ParenState::Subexpr)) => after_rexpr(s),
  730. - (AfterRExpr, Some(&ParenState::Func)) => after_rexpr_comma(s),
  731. + (TokenizerState::LExpr, _) => lexpr(s),
  732. + (TokenizerState::AfterRExpr, None) => after_rexpr_no_paren(s),
  733. + (TokenizerState::AfterRExpr, Some(&ParenState::Subexpr)) => after_rexpr(s),
  734. + (TokenizerState::AfterRExpr, Some(&ParenState::Func)) => after_rexpr_comma(s),
  735. };
  736. match r {
  737. - Done(rest, t) => {
  738. + Ok((rest, t)) => {
  739. +
  740. match t {
  741. Token::LParen => {
  742. paren_stack.push(ParenState::Subexpr);
  743. @@ -311,133 +305,366 @@ pub fn tokenize<S: AsRef<str>>(input: S) -> Result<Vec<Token>, ParseError> {
  744. paren_stack.pop().expect("The paren_stack is empty!");
  745. }
  746. Token::Var(_) | Token::Number(_) => {
  747. - state = AfterRExpr;
  748. + state = TokenizerState::AfterRExpr;
  749. }
  750. Token::Binary(_) | Token::Comma => {
  751. - state = LExpr;
  752. + state = TokenizerState::LExpr;
  753. }
  754. _ => {}
  755. }
  756. res.push(t);
  757. s = rest;
  758. }
  759. - Error(Err::Position(_, p)) => {
  760. - let (i, _) = slice_to_offsets(input, p);
  761. - return Err(ParseError::UnexpectedToken(i));
  762. + Err(e) => {
  763. +
  764. + match e {
  765. + Err::Error((value, _)) => {
  766. + return Err(TokenParseError::UnexpectedStrToken(value.to_string()));
  767. + },
  768. + _ => (),
  769. + }
  770. +
  771. + return Err(TokenParseError::UnknownError);
  772. }
  773. + // Error(Err::Position(_, p)) => {
  774. + // let (i, _) = slice_to_offsets(input, p);
  775. + // return Err(TokenParseError::UnexpectedToken(i));
  776. + // }
  777. _ => {
  778. - panic!(
  779. - "Unexpected parse result when parsing `{}` at `{}`: {:?}",
  780. - String::from_utf8_lossy(input),
  781. - String::from_utf8_lossy(s),
  782. - r
  783. - );
  784. + panic!("Unexpected parse result when parsing `{}` at `{}`: {:?}", input, s, r);
  785. }
  786. }
  787. +
  788. }
  789. match state {
  790. - LExpr => Err(ParseError::MissingArgument),
  791. - _ if !paren_stack.is_empty() => Err(ParseError::MissingRParen(paren_stack.len() as i32)),
  792. - _ => Ok(res),
  793. + TokenizerState::LExpr => {
  794. + Err(TokenParseError::MissingArgument)
  795. + },
  796. +
  797. + _ => {
  798. + if !paren_stack.is_empty() {
  799. + return Err(TokenParseError::MissingRParen(paren_stack.len() as i32));
  800. + }
  801. +
  802. + return Ok(res);
  803. + }
  804. }
  805. +
  806. +
  807. }
  808. +
  809. +
  810. +
  811. +// ok rest ["+(3--2) "] t Number(2.0)
  812. +// ok rest ["(3--2) "] t Binary(Plus)
  813. +// ok rest [51, 45, 45, 50, 41, 32] t LParen
  814. +// ok rest [45, 45, 50, 41, 32] t Number(3.0)
  815. +// ok rest [45, 50, 41, 32] t Binary(Minus)
  816. +// ok rest [50, 41, 32] t Unary(Minus)
  817. +// ok rest [") "] t Number(2.0)
  818. +// ok rest [] t RParen
  819. +// state: AfterRExpr
  820. +// paren_stack: []
  821. +// Ok([Number(2.0), Binary(Plus), LParen, Number(3.0), Binary(Minus), Unary(Minus), Number(2.0), RParen])
  822. +
  823. +
  824. #[cfg(test)]
  825. mod tests {
  826. use super::*;
  827. - use super::{binop, func, number, var};
  828. - use nom::Err::*;
  829. - use nom::ErrorKind::*;
  830. - use nom::IResult;
  831. -
  832. +
  833. #[test]
  834. fn it_works() {
  835. +
  836. + assert_eq!(
  837. + binop("+"),
  838. + Ok(("", Token::Binary(Operation::Plus)))
  839. + );
  840. + assert_eq!(
  841. + ident("abc32"),
  842. + Ok(("", "abc32"))
  843. + );
  844. + assert_eq!(
  845. + func("abc("),
  846. + Ok(("", Token::Func("abc".into(), None)))
  847. + );
  848. + assert_eq!(
  849. + func("abc ("),
  850. + Ok(("", Token::Func("abc".into(), None)))
  851. + );
  852. + assert_eq!(
  853. + var("abc"),
  854. + Ok(("", Token::Var("abc".into())))
  855. + );
  856. + assert_eq!(
  857. + fact("!"),
  858. + Ok(("", Token::Unary(Operation::Fact)))
  859. + );
  860. + assert_eq!(
  861. + negpos_s("+"),
  862. + Ok(("", "+"))
  863. + );
  864. + assert_eq!(
  865. + negpos_s("-"),
  866. + Ok(("", "-"))
  867. + );
  868. + assert_eq!(
  869. + negpos_s("+362"),
  870. + Ok(("362", "+"))
  871. + );
  872. + assert_eq!(
  873. + negpos_s("-5734"),
  874. + Ok(("5734", "-"))
  875. + );
  876. + assert_eq!(
  877. + negpos("+"),
  878. + Ok(("", Token::Unary(Operation::Plus)))
  879. + );
  880. + assert_eq!(
  881. + negpos("-"),
  882. + Ok(("", Token::Unary(Operation::Minus)))
  883. + );
  884. + assert_eq!(
  885. + negpos("+642"),
  886. + Ok(("642", Token::Unary(Operation::Plus)))
  887. + );
  888. + assert_eq!(
  889. + negpos("-563"),
  890. + Ok(("563", Token::Unary(Operation::Minus)))
  891. + );
  892. + assert_eq!(
  893. + lparen("("),
  894. + Ok(("", Token::LParen))
  895. + );
  896. + assert_eq!(
  897. + rparen(")"),
  898. + Ok(("", Token::RParen))
  899. + );
  900. + assert_eq!(
  901. + comma(","),
  902. + Ok(("", Token::Comma))
  903. + );
  904. assert_eq!(
  905. - binop(b"+"),
  906. - IResult::Done(&b""[..], Token::Binary(Operation::Plus))
  907. + comma(","),
  908. + Ok(("", Token::Comma))
  909. );
  910. assert_eq!(
  911. - number(b"32143"),
  912. - IResult::Done(&b""[..], Token::Number(32143f64))
  913. + number("+1.34e2"),
  914. + Ok(("", Token::Number(134.0)))
  915. );
  916. assert_eq!(
  917. - var(b"abc"),
  918. - IResult::Done(&b""[..], Token::Var("abc".into()))
  919. + number("+1.34e+2"),
  920. + Ok(("", Token::Number(134.0)))
  921. );
  922. assert_eq!(
  923. - func(b"abc("),
  924. - IResult::Done(&b""[..], Token::Func("abc".into(), None))
  925. + number("3E+2"),
  926. + Ok(("", Token::Number(300.0)))
  927. );
  928. assert_eq!(
  929. - func(b"abc ("),
  930. - IResult::Done(&b""[..], Token::Func("abc".into(), None))
  931. + number("+4E+2"),
  932. + Ok(("", Token::Number(400.0)))
  933. + );
  934. + assert_eq!(
  935. + number("-4.76E+2"),
  936. + Ok(("", Token::Number(-476.0)))
  937. + );
  938. + assert_eq!(
  939. + number("-4.76"),
  940. + Ok(("", Token::Number(-4.76)))
  941. + );
  942. + assert_eq!(
  943. + number("+4.76"),
  944. + Ok(("", Token::Number(4.76)))
  945. + );
  946. + assert_eq!(
  947. + number("1.1"),
  948. + Ok(("", Token::Number(1.1)))
  949. + );
  950. + assert_eq!(
  951. + number("-1.1"),
  952. + Ok(("", Token::Number(-1.1)))
  953. + );
  954. + assert_eq!(
  955. + number("123E-02"),
  956. + Ok(("", Token::Number(1.23)))
  957. + );
  958. + assert_eq!(
  959. + number("+123E-02"),
  960. + Ok(("", Token::Number(1.23)))
  961. + );
  962. + assert_eq!(
  963. + number("-123E-02"),
  964. + Ok(("", Token::Number(-1.23)))
  965. + );
  966. + assert_eq!(
  967. + number("abc"),
  968. + Err(Err::Error(("abc", nom::error::ErrorKind::Float)))
  969. );
  970. }
  971. #[test]
  972. - fn test_var() {
  973. - for &s in ["abc", "U0", "_034", "a_be45EA", "aAzZ_"].iter() {
  974. - assert_eq!(
  975. - var(s.as_bytes()),
  976. - IResult::Done(&b""[..], Token::Var(s.into()))
  977. - );
  978. - }
  979. + fn test_lexpr() {
  980. +
  981. + // number, func, var, negpos, lparen
  982. + assert_eq!(
  983. + number("a"),
  984. + Err(Err::Error(("a", nom::error::ErrorKind::Float)))
  985. + );
  986. +
  987. + assert_eq!(
  988. + func("a"),
  989. + Err(Err::Error(("", nom::error::ErrorKind::Tag)))
  990. + );
  991. +
  992. + assert_eq!(
  993. + var("a"),
  994. + Ok(("", Token::Var("a".into())))
  995. + );
  996. +
  997. + assert_eq!(
  998. + lexpr("a"),
  999. + Ok(("", Token::Var("a".into())))
  1000. + );
  1001. +
  1002. + assert_eq!(
  1003. + lexpr("2+"),
  1004. + Ok(("+", Token::Number(2.0)))
  1005. + );
  1006. +
  1007. + assert_eq!(
  1008. + lexpr("2 +(3--2) "),
  1009. + Ok(("+(3--2) ", Token::Number(2.0)))
  1010. + );
  1011. +
  1012. +
  1013. + println!("{:?}", number("+(3--2) "));
  1014. +
  1015. + assert_eq!(
  1016. + lexpr("+(3--2) "),
  1017. + Ok(("+(3--2) ", Token::Binary(Operation::Plus)))
  1018. + );
  1019. - assert_eq!(var(b""), IResult::Error(Position(Complete, &b""[..])));
  1020. - assert_eq!(var(b"0"), IResult::Error(Position(Custom(0), &b"0"[..])));
  1021. }
  1022. #[test]
  1023. - fn test_func() {
  1024. - for &s in ["abc(", "u0(", "_034 (", "A_be45EA ("].iter() {
  1025. + fn test_var() {
  1026. + for &s in ["abc", "U0", "_034", "a_be45EA", "aAzZ_"].iter() {
  1027. assert_eq!(
  1028. - func(s.as_bytes()),
  1029. - IResult::Done(
  1030. - &b""[..],
  1031. - Token::Func((&s[0..s.len() - 1]).trim().into(), None)
  1032. - )
  1033. + var(s),
  1034. + Ok(("", Token::Var(s.into())))
  1035. );
  1036. }
  1037. - assert_eq!(func(b""), IResult::Error(Position(Complete, &b""[..])));
  1038. - assert_eq!(func(b"("), IResult::Error(Position(Custom(0), &b"("[..])));
  1039. - assert_eq!(func(b"0("), IResult::Error(Position(Custom(0), &b"0("[..])));
  1040. + assert_eq!(var(""), Err(Err::Error(("", nom::error::ErrorKind::OneOf))));
  1041. + assert_eq!(var("0"), Err(Err::Error(("0", nom::error::ErrorKind::OneOf))));
  1042. }
  1043. #[test]
  1044. fn test_number() {
  1045. +
  1046. + assert_eq!(
  1047. + number("45"),
  1048. + Ok(("", Token::Number(45.0)))
  1049. + );
  1050. +
  1051. + assert_eq!(
  1052. + number("+(3--2) "),
  1053. + Err(Err::Error(("+(3--2) ", nom::error::ErrorKind::OneOf)))
  1054. + );
  1055. +
  1056. + assert_eq!(
  1057. + number("+3 "),
  1058. + Err(Err::Error(("+3 ", nom::error::ErrorKind::OneOf)))
  1059. + );
  1060. +
  1061. assert_eq!(
  1062. - number(b"32143"),
  1063. - IResult::Done(&b""[..], Token::Number(32143f64))
  1064. + number("(3--2) "),
  1065. + Err(Err::Error(("(3--2) ", nom::error::ErrorKind::OneOf)))
  1066. + );
  1067. +
  1068. + assert_eq!(
  1069. + number("(3) "),
  1070. + Err(Err::Error(("(3) ", nom::error::ErrorKind::OneOf)))
  1071. + );
  1072. + assert_eq!(
  1073. + number("(3) - (2) "),
  1074. + Err(Err::Error(("(3) - (2) ", nom::error::ErrorKind::OneOf)))
  1075. + );
  1076. + assert_eq!(
  1077. + number("32143"),
  1078. + Ok(("", Token::Number(32143f64)))
  1079. + );
  1080. + assert_eq!(
  1081. + number("2."),
  1082. + Ok(("", Token::Number(2.0f64)))
  1083. );
  1084. assert_eq!(
  1085. - number(b"2."),
  1086. - IResult::Done(&b""[..], Token::Number(2.0f64))
  1087. + number("32143.25"),
  1088. + Ok(("", Token::Number(32143.25f64)))
  1089. );
  1090. assert_eq!(
  1091. - number(b"32143.25"),
  1092. - IResult::Done(&b""[..], Token::Number(32143.25f64))
  1093. + number("0.125e9"),
  1094. + Ok(("", Token::Number(0.125e9f64)))
  1095. );
  1096. assert_eq!(
  1097. - number(b"0.125e9"),
  1098. - IResult::Done(&b""[..], Token::Number(0.125e9f64))
  1099. + number("20.5E-3"),
  1100. + Ok(("", Token::Number(20.5E-3f64)))
  1101. );
  1102. assert_eq!(
  1103. - number(b"20.5E-3"),
  1104. - IResult::Done(&b""[..], Token::Number(20.5E-3f64))
  1105. + number("123423e+50"),
  1106. + Ok(("", Token::Number(123423e+50f64)))
  1107. );
  1108. assert_eq!(
  1109. - number(b"123423e+50"),
  1110. - IResult::Done(&b""[..], Token::Number(123423e+50f64))
  1111. + number("0.2"),
  1112. + Ok(("", Token::Number(0.2)))
  1113. );
  1114. + assert_eq!(
  1115. + number(""),
  1116. + Err(Err::Error(("", nom::error::ErrorKind::OneOf)))
  1117. + );
  1118. + assert_eq!(
  1119. + number("+"),
  1120. + Err(Err::Error(("+", nom::error::ErrorKind::OneOf)))
  1121. + );
  1122. + assert_eq!(
  1123. + number("e"),
  1124. + Err(Err::Error(("e", nom::error::ErrorKind::OneOf)))
  1125. + );
  1126. + assert_eq!(
  1127. + number("1E"),
  1128. + Err(Err::Error(("E", nom::error::ErrorKind::Eof)))
  1129. + );
  1130. + assert_eq!(
  1131. + number("1e"),
  1132. + Err(Err::Error(("e", nom::error::ErrorKind::Eof)))
  1133. + );
  1134. + assert_eq!(
  1135. + number("1e+"),
  1136. + Err(Err::Error(("+", nom::error::ErrorKind::Eof)))
  1137. + );
  1138. + assert_eq!(
  1139. + number("1e+-?%"),
  1140. + Err(Err::Error(("-?%", nom::error::ErrorKind::Eof)))
  1141. + );
  1142. + assert_eq!(
  1143. + number("2+"),
  1144. + Err(Err::Error(("+", nom::error::ErrorKind::Eof)))
  1145. + );
  1146. + }
  1147. - assert_eq!(number(b""), IResult::Error(Position(Digit, &b""[..])));
  1148. - assert_eq!(number(b".2"), IResult::Error(Position(Digit, &b".2"[..])));
  1149. - assert_eq!(number(b"+"), IResult::Error(Position(Digit, &b"+"[..])));
  1150. - assert_eq!(number(b"e"), IResult::Error(Position(Digit, &b"e"[..])));
  1151. - assert_eq!(number(b"1E"), IResult::Error(Position(Complete, &b"E"[..])));
  1152. - assert_eq!(number(b"1e+"), IResult::Error(Position(Digit, &b""[..])));
  1153. + #[test]
  1154. + fn test_func() {
  1155. + for &s in ["abc(", "u0(", "_034 (", "A_be45EA ("].iter() {
  1156. + assert_eq!(
  1157. + func(s),
  1158. + Ok(("", Token::Func((&s[0..s.len() - 1]).trim().into(), None)))
  1159. + );
  1160. + }
  1161. +
  1162. + assert_eq!(func(""), Err(Err::Error(("", nom::error::ErrorKind::OneOf))));
  1163. + assert_eq!(func("("), Err(Err::Error(("(", nom::error::ErrorKind::OneOf))));
  1164. + assert_eq!(func("0("), Err(Err::Error(("0(", nom::error::ErrorKind::OneOf))));
  1165. }
  1166. #[test]
  1167. @@ -445,6 +672,10 @@ mod tests {
  1168. use super::Operation::*;
  1169. use super::Token::*;
  1170. + // Ok([Number(2.0), Binary(Plus), Number(2.0), Binary(Div), Number(3.0), Binary(Minus), Number(56.0), Binary(Plus), Func("sin", None), Number(3.0), RParen])
  1171. + // Ok([Number(2.0), Binary(Plus), Number(2.0), Binary(Div), Number(3.0), Binary(Minus), Number(56.0), Binary(Plus), Func("sin", None), Number(3.0), RParen])
  1172. + println!("{:?}", tokenize("2 + 2/3-56 + sin(3)"));
  1173. +
  1174. assert_eq!(tokenize("a"), Ok(vec![Var("a".into())]));
  1175. assert_eq!(
  1176. @@ -522,15 +753,15 @@ mod tests {
  1177. ])
  1178. );
  1179. - assert_eq!(tokenize("!3"), Err(ParseError::UnexpectedToken(0)));
  1180. + assert_eq!(tokenize("!3"), Err(TokenParseError::UnexpectedStrToken("!3".to_string())));
  1181. - assert_eq!(tokenize("()"), Err(ParseError::UnexpectedToken(1)));
  1182. + assert_eq!(tokenize("()"), Err(TokenParseError::UnexpectedStrToken(")".to_string())));
  1183. - assert_eq!(tokenize(""), Err(ParseError::MissingArgument));
  1184. - assert_eq!(tokenize("2)"), Err(ParseError::UnexpectedToken(1)));
  1185. - assert_eq!(tokenize("2^"), Err(ParseError::MissingArgument));
  1186. - assert_eq!(tokenize("(((2)"), Err(ParseError::MissingRParen(2)));
  1187. - assert_eq!(tokenize("f(2,)"), Err(ParseError::UnexpectedToken(4)));
  1188. - assert_eq!(tokenize("f(,2)"), Err(ParseError::UnexpectedToken(2)));
  1189. + assert_eq!(tokenize(""), Err(TokenParseError::MissingArgument));
  1190. + assert_eq!(tokenize("2)"), Err(TokenParseError::UnexpectedStrToken(")".to_string())));
  1191. + assert_eq!(tokenize("2^"), Err(TokenParseError::MissingArgument));
  1192. + assert_eq!(tokenize("(((2)"), Err(TokenParseError::MissingRParen(2)));
  1193. + assert_eq!(tokenize("f(2,)"), Err(TokenParseError::UnexpectedStrToken(")".to_string())));
  1194. + assert_eq!(tokenize("f(,2)"), Err(TokenParseError::UnexpectedStrToken(",2)".to_string())));
  1195. }
  1196. -}
  1197. +}
  1198. \ No newline at end of file
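Patch 04 above is the largest change: nom 1's macro DSL (named!, chain!, alt!, tag!) is rewritten into nom 7's plain-function combinators, and ParseError becomes TokenParseError with a string-based UnexpectedStrToken variant (the old byte-offset reporting via slice_to_offsets is left commented out). A minimal sketch of the new style, assuming nom 7.x and a generic token type rather than meval's:

    use nom::{bytes::complete::tag, combinator::map, IResult};

    #[derive(Debug, PartialEq)]
    enum Tok {
        LParen,
    }

    // In nom 7 a parser is an ordinary function returning IResult;
    // combinators such as `map` build and return the parser closure.
    fn lparen(i: &str) -> IResult<&str, Tok> {
        map(tag("("), |_| Tok::LParen)(i)
    }

    fn main() {
        assert_eq!(lparen("(x"), Ok(("x", Tok::LParen)));
        assert!(lparen("x").is_err());
    }

The nom 1 equivalent was the macro form removed above, named!(lparen<Token>, chain!(tag!("("), || Token::LParen));.
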
  1199. From f888f42034a37f0f8a41fe88d091246ff4b8a574 Mon Sep 17 00:00:00 2001
  1200. From: Simon Gardling <titaniumtown@gmail.com>
  1201. Date: Wed, 16 Feb 2022 10:39:10 -0500
  1202. Subject: [PATCH 05/10] clippy
  1203. ---
  1204. src/expr.rs | 22 +++++++++++-----------
  1205. src/extra_math.rs | 10 +++++-----
  1206. src/tokenizer.rs | 25 ++++++++++++-------------
  1207. 3 files changed, 28 insertions(+), 29 deletions(-)
  1208. diff --git a/src/expr.rs b/src/expr.rs
  1209. index f6b303c..c151756 100644
  1210. --- a/src/expr.rs
  1211. +++ b/src/expr.rs
  1212. @@ -390,7 +390,7 @@ impl Expr {
  1213. {
  1214. let n = vars.len();
  1215. self.check_context((
  1216. - vars.into_iter()
  1217. + vars.iter()
  1218. .zip(vec![0.; n].into_iter())
  1219. .collect::<Vec<_>>(),
  1220. &ctx
  1221. @@ -399,7 +399,7 @@ impl Expr {
  1222. Ok(move |x: &[f64]| {
  1223. self.eval_with_context((
  1224. vars.iter()
  1225. - .zip(x.into_iter())
  1226. + .zip(x.iter())
  1227. .map(|(v, x)| (v, *x))
  1228. .collect::<Vec<_>>(),
  1229. &ctx,
  1230. @@ -460,7 +460,7 @@ impl FromStr for Expr {
  1231. let rpn = to_rpn(&tokens)?;
  1232. - Ok(Expr { rpn: rpn })
  1233. + Ok(Expr { rpn })
  1234. }
  1235. }
  1236. @@ -599,21 +599,21 @@ pub fn builtin<'a>() -> Context<'a> {
  1237. impl<'a, T: ContextProvider> ContextProvider for &'a T {
  1238. fn get_var(&self, name: &str) -> Option<f64> {
  1239. - (&**self).get_var(name)
  1240. + (**self).get_var(name)
  1241. }
  1242. fn eval_func(&self, name: &str, args: &[f64]) -> Result<f64, FuncEvalError> {
  1243. - (&**self).eval_func(name, args)
  1244. + (**self).eval_func(name, args)
  1245. }
  1246. }
  1247. impl<'a, T: ContextProvider> ContextProvider for &'a mut T {
  1248. fn get_var(&self, name: &str) -> Option<f64> {
  1249. - (&**self).get_var(name)
  1250. + (**self).get_var(name)
  1251. }
  1252. fn eval_func(&self, name: &str, args: &[f64]) -> Result<f64, FuncEvalError> {
  1253. - (&**self).eval_func(name, args)
  1254. + (**self).eval_func(name, args)
  1255. }
  1256. }
  1257. @@ -1170,20 +1170,20 @@ mod tests {
  1258. );
  1259. let expr = Expr::from_str("x + y^2 + z^3").unwrap();
  1260. - let func = expr.clone().bind3("x", "y", "z").unwrap();
  1261. + let func = expr.bind3("x", "y", "z").unwrap();
  1262. assert_eq!(func(1., 2., 3.), 32.);
  1263. let expr = Expr::from_str("sin(x)").unwrap();
  1264. - let func = expr.clone().bind("x").unwrap();
  1265. + let func = expr.bind("x").unwrap();
  1266. assert_eq!(func(1.), (1f64).sin());
  1267. let expr = Expr::from_str("sin(x,2)").unwrap();
  1268. - match expr.clone().bind("x") {
  1269. + match expr.bind("x") {
  1270. Err(Error::Function(_, FuncEvalError::NumberArgs(1))) => {}
  1271. _ => panic!("bind did not error"),
  1272. }
  1273. let expr = Expr::from_str("hey(x,2)").unwrap();
  1274. - match expr.clone().bind("x") {
  1275. + match expr.bind("x") {
  1276. Err(Error::Function(_, FuncEvalError::UnknownFunction)) => {}
  1277. _ => panic!("bind did not error"),
  1278. }
  1279. diff --git a/src/extra_math.rs b/src/extra_math.rs
  1280. index 63b6296..39d31f7 100644
  1281. --- a/src/extra_math.rs
  1282. +++ b/src/extra_math.rs
  1283. @@ -2,19 +2,19 @@
  1284. // This is to take advantage of the fact that std::f64::MAX >>> std::u64::MAX
  1285. fn factorial_unsafe(num: f64) -> f64 {
  1286. if num == 0. || num == 1. {
  1287. - return 1.;
  1288. + 1.
  1289. } else {
  1290. - return num * factorial_unsafe(num - 1.);
  1291. + num * factorial_unsafe(num - 1.)
  1292. }
  1293. }
  1294. pub fn factorial(num: f64) -> Result<f64, &'static str> {
  1295. if num.fract() != 0. || num < 0. {
  1296. - return Err("Number must be non-negative with no fractional component!");
  1297. + Err("Number must be non-negative with no fractional component!")
  1298. } else if num > 170. {
  1299. - return Ok(std::f64::INFINITY);
  1300. + Ok(std::f64::INFINITY)
  1301. } else {
  1302. - return Ok(factorial_unsafe(num));
  1303. + Ok(factorial_unsafe(num))
  1304. }
  1305. }
  1306. diff --git a/src/tokenizer.rs b/src/tokenizer.rs
  1307. index 0e08337..322eb15 100644
  1308. --- a/src/tokenizer.rs
  1309. +++ b/src/tokenizer.rs
  1310. @@ -9,18 +9,17 @@
  1311. use nom::{
  1312. branch::alt,
  1313. bytes::complete::is_a,
  1314. - bytes::complete::{escaped, take, tag, take_while},
  1315. - character::complete::{anychar, digit1, multispace0, alphanumeric1, alphanumeric0, char, alpha1, one_of},
  1316. - combinator::{complete, peek, all_consuming, recognize, map, opt, cut, not},
  1317. - error::{context, convert_error, ErrorKind, ParseError, VerboseError},
  1318. - multi::separated_list0,
  1319. + bytes::complete::{tag},
  1320. + character::complete::{multispace0, one_of},
  1321. + combinator::{complete, peek, recognize, map, opt},
  1322. + error::{ErrorKind},
  1323. number::complete::double,
  1324. - sequence::{tuple, pair, delimited, preceded, separated_pair, terminated},
  1325. - Err, Needed, IResult
  1326. + sequence::{delimited, preceded, terminated},
  1327. + Err, IResult
  1328. };
  1329. use std::fmt;
  1330. -use std::str::from_utf8;
  1331. +
  1332. use std::f64;
  1333. /// An error reported by the parser.
  1334. @@ -155,7 +154,7 @@ fn negpos<'a>(i: &'a str) -> IResult<&'a str, Token, (&'a str, ErrorKind)> {
  1335. match negpos_s(i) {
  1336. Ok((remaining_input, operator)) => {
  1337. - match operator.as_ref() {
  1338. + match operator {
  1339. "+" => Ok((remaining_input, Token::Unary(Operation::Plus))),
  1340. "-" => Ok((remaining_input, Token::Unary(Operation::Minus))),
  1341. _ => panic!("Should never occur")
  1342. @@ -167,7 +166,7 @@ fn negpos<'a>(i: &'a str) -> IResult<&'a str, Token, (&'a str, ErrorKind)> {
  1343. /// factorial parse
  1344. fn fact<'a>(i: &'a str) -> IResult<&'a str, Token, (&'a str, ErrorKind)> {
  1345. - map(tag("!"), |s: &str| Token::Unary(Operation::Fact))(i)
  1346. + map(tag("!"), |_s: &str| Token::Unary(Operation::Fact))(i)
  1347. }
  1348. fn ident<'a>(i: &'a str) -> IResult<&'a str, &'a str, (&'a str, ErrorKind)> {
  1349. @@ -210,7 +209,7 @@ fn func<'a>(i: &'a str) -> IResult<&'a str, Token, (&'a str, ErrorKind)> {
  1350. fn number<'a>(i: &'a str) -> IResult<&'a str, Token, (&'a str, ErrorKind)> {
  1351. preceded(
  1352. peek(one_of("0123456789")),
  1353. - map(double, |s| Token::Number(s))
  1354. + map(double, Token::Number)
  1355. )(i)
  1356. }
  1357. @@ -347,7 +346,7 @@ pub fn tokenize(input: &str) -> Result<Vec<Token>, TokenParseError> {
  1358. return Err(TokenParseError::MissingRParen(paren_stack.len() as i32));
  1359. }
  1360. - return Ok(res);
  1361. + Ok(res)
  1362. }
  1363. }
  1364. @@ -658,7 +657,7 @@ mod tests {
  1365. for &s in ["abc(", "u0(", "_034 (", "A_be45EA ("].iter() {
  1366. assert_eq!(
  1367. func(s),
  1368. - Ok(("", Token::Func((&s[0..s.len() - 1]).trim().into(), None)))
  1369. + Ok(("", Token::Func(s[0..s.len() - 1].trim().into(), None)))
  1370. );
  1371. }
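Patch 05 above applies clippy suggestions: needless `return`s become tail expressions (src/extra_math.rs), `Expr { rpn: rpn }` uses field-init shorthand, redundant `.clone()` calls and extra borrows like `(&**self)` are dropped, and `map(double, |s| Token::Number(s))` collapses to `map(double, Token::Number)`. A tiny standalone sketch of the first two idioms (hypothetical types, not meval's):

    struct Expr {
        rpn: Vec<u8>,
    }

    // clippy::redundant_field_names: write `Expr { rpn }` instead of `Expr { rpn: rpn }`.
    fn build(rpn: Vec<u8>) -> Expr {
        Expr { rpn }
    }

    // clippy::needless_return: the if/else is the tail expression, no `return` needed.
    fn sign(x: f64) -> f64 {
        if x < 0.0 {
            -1.0
        } else {
            1.0
        }
    }

    fn main() {
        assert_eq!(build(vec![1, 2]).rpn.len(), 2);
        assert_eq!(sign(-3.5), -1.0);
    }
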

From ad90ef78e9986dbd3c84a0743706384b433a4d38 Mon Sep 17 00:00:00 2001
From: Simon Gardling <titaniumtown@gmail.com>
Date: Wed, 16 Feb 2022 10:39:18 -0500
Subject: [PATCH 06/10] rustfmt

---
src/expr.rs | 8 +-
src/shunting_yard.rs | 1 -
src/tokenizer.rs | 447 +++++++++++++++----------------------------
3 files changed, 152 insertions(+), 304 deletions(-)

diff --git a/src/expr.rs b/src/expr.rs
index c151756..04b54b5 100644
--- a/src/expr.rs
+++ b/src/expr.rs
@@ -340,7 +340,7 @@ impl Expr {
{
self.check_context((
[(var1, 0.), (var2, 0.), (var3, 0.), (var4, 0.), (var5, 0.)],
- &ctx
+ &ctx,
))?;
let var1 = var1.to_owned();
let var2 = var2.to_owned();
@@ -390,10 +390,8 @@ impl Expr {
{
let n = vars.len();
self.check_context((
- vars.iter()
- .zip(vec![0.; n].into_iter())
- .collect::<Vec<_>>(),
- &ctx
+ vars.iter().zip(vec![0.; n].into_iter()).collect::<Vec<_>>(),
+ &ctx,
))?;
let vars = vars.iter().map(|v| v.to_owned()).collect::<Vec<_>>();
Ok(move |x: &[f64]| {
diff --git a/src/shunting_yard.rs b/src/shunting_yard.rs
index 90026bf..5fe3010 100644
--- a/src/shunting_yard.rs
+++ b/src/shunting_yard.rs
@@ -1,4 +1,3 @@
-
//! Implementation of the shunting-yard algorithm for converting an infix expression to an
//! expression in reverse Polish notation (RPN).
//!
diff --git a/src/tokenizer.rs b/src/tokenizer.rs
index 322eb15..190fa51 100644
--- a/src/tokenizer.rs
+++ b/src/tokenizer.rs
@@ -7,15 +7,15 @@
//! [nom]: https://crates.io/crates/nom
use nom::{
- branch::alt,
- bytes::complete::is_a,
- bytes::complete::{tag},
- character::complete::{multispace0, one_of},
- combinator::{complete, peek, recognize, map, opt},
- error::{ErrorKind},
- number::complete::double,
- sequence::{delimited, preceded, terminated},
- Err, IResult
+ branch::alt,
+ bytes::complete::is_a,
+ bytes::complete::tag,
+ character::complete::{multispace0, one_of},
+ combinator::{complete, map, opt, peek, recognize},
+ error::ErrorKind,
+ number::complete::double,
+ sequence::{delimited, preceded, terminated},
+ Err, IResult,
};
use std::fmt;
@@ -36,7 +36,7 @@ pub enum TokenParseError {
/// Missing operator or function argument at the end of the expression.
MissingArgument,
- UnknownError
+ UnknownError,
}
impl fmt::Display for TokenParseError {
@@ -50,7 +50,9 @@ impl fmt::Display for TokenParseError {
i,
if *i == 1 { "is" } else { "es" }
),
- TokenParseError::MissingArgument => write!(f, "Missing argument at the end of expression."),
+ TokenParseError::MissingArgument => {
+ write!(f, "Missing argument at the end of expression.")
+ }
TokenParseError::UnknownError => write!(f, "Unknown pass error."),
}
}
@@ -94,7 +96,7 @@ pub enum Token {
RParen,
/// Comma: function argument separator
Comma,
- /// Decimal Point
+ /// Decimal Point
//DecimalPoint,
/// A number.
@@ -108,24 +110,24 @@ pub enum Token {
/// Continuing the trend of starting from the simplest piece and building up,
/// we start by creating a parser for the built-in operator functions.
fn binop<'a>(i: &'a str) -> IResult<&'a str, Token, (&'a str, ErrorKind)> {
- // one_of matches one of the characters we give it
- let (i, t) = one_of("+-*/%^!")(i)?;
-
- // because we are matching single character tokens, we can do the matching logic
- // on the returned value
- Ok((
- i,
- match t {
- '+' => Token::Binary(Operation::Plus),
- '-' => Token::Binary(Operation::Minus),
- '*' => Token::Binary(Operation::Times),
- '/' => Token::Binary(Operation::Div),
- '%' => Token::Binary(Operation::Rem),
- '^' => Token::Binary(Operation::Pow),
- '!' => Token::Binary(Operation::Fact),
- _ => unreachable!(),
- },
- ))
+ // one_of matches one of the characters we give it
+ let (i, t) = one_of("+-*/%^!")(i)?;
+
+ // because we are matching single character tokens, we can do the matching logic
+ // on the returned value
+ Ok((
+ i,
+ match t {
+ '+' => Token::Binary(Operation::Plus),
+ '-' => Token::Binary(Operation::Minus),
+ '*' => Token::Binary(Operation::Times),
+ '/' => Token::Binary(Operation::Div),
+ '%' => Token::Binary(Operation::Rem),
+ '^' => Token::Binary(Operation::Pow),
+ '!' => Token::Binary(Operation::Fact),
+ _ => unreachable!(),
+ },
+ ))
}
fn lparen<'a>(i: &'a str) -> IResult<&'a str, Token, (&'a str, ErrorKind)> {
@@ -142,25 +144,21 @@ fn comma<'a>(i: &'a str) -> IResult<&'a str, Token, (&'a str, ErrorKind)> {
/// negpos parse. detects either - or +
fn negpos_s<'a>(i: &'a str) -> IResult<&'a str, &'a str, (&'a str, ErrorKind)> {
-
- match alt((tag("+"), tag("-"), ))(i) {
+ match alt((tag("+"), tag("-")))(i) {
Ok((remaining_input, operator)) => Ok((remaining_input, operator)),
- Err(e) => Err(e)
+ Err(e) => Err(e),
}
}
/// negpos parse. detects either - or +
fn negpos<'a>(i: &'a str) -> IResult<&'a str, Token, (&'a str, ErrorKind)> {
-
- match negpos_s(i) {
- Ok((remaining_input, operator)) => {
- match operator {
- "+" => Ok((remaining_input, Token::Unary(Operation::Plus))),
- "-" => Ok((remaining_input, Token::Unary(Operation::Minus))),
- _ => panic!("Should never occur")
- }
+ match negpos_s(i) {
+ Ok((remaining_input, operator)) => match operator {
+ "+" => Ok((remaining_input, Token::Unary(Operation::Plus))),
+ "-" => Ok((remaining_input, Token::Unary(Operation::Minus))),
+ _ => panic!("Should never occur"),
},
- Err(e) => Err(e)
+ Err(e) => Err(e),
}
}
@@ -172,19 +170,19 @@ fn fact<'a>(i: &'a str) -> IResult<&'a str, Token, (&'a str, ErrorKind)> {
fn ident<'a>(i: &'a str) -> IResult<&'a str, &'a str, (&'a str, ErrorKind)> {
let REMAINING_CHARS: &str = "_abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789";
let FIRST_CHARS: &str = "_abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ";
-
+
// Returns whole strings matched by the given parser.
recognize(
- // Runs the first parser, if succeeded then runs second, and returns the second result.
- // Note that returned ok value of `preceded()` is ignored by `recognize()`.
- preceded(
- // Parses a single character contained in the given string.
- one_of(FIRST_CHARS),
- // Parses the longest slice consisting of the given characters
- opt(is_a(REMAINING_CHARS)),
- )
+ // Runs the first parser, if succeeded then runs second, and returns the second result.
+ // Note that returned ok value of `preceded()` is ignored by `recognize()`.
+ preceded(
+ // Parses a single character contained in the given string.
+ one_of(FIRST_CHARS),
+ // Parses the longest slice consisting of the given characters
+ opt(is_a(REMAINING_CHARS)),
+ ),
)(i)
- }
+}
fn var<'a>(i: &'a str) -> IResult<&'a str, Token, (&'a str, ErrorKind)> {
map(complete(ident), |s: &str| Token::Var(s.into()))(i)
@@ -192,62 +190,35 @@ fn var<'a>(i: &'a str) -> IResult<&'a str, Token, (&'a str, ErrorKind)> {
/// Parse `func(`, returns `func`.
fn func<'a>(i: &'a str) -> IResult<&'a str, Token, (&'a str, ErrorKind)> {
- map(
- //recognize(
- terminated(
- complete(ident),
- preceded(multispace0,
- complete(tag("("))
- )
- )
- //)
- ,
- |s: &str| Token::Func(s.into(), None)
+ map(
+ //recognize(
+ terminated(complete(ident), preceded(multispace0, complete(tag("(")))), //)
+ |s: &str| Token::Func(s.into(), None),
)(i)
}
fn number<'a>(i: &'a str) -> IResult<&'a str, Token, (&'a str, ErrorKind)> {
- preceded(
- peek(one_of("0123456789")),
- map(double, Token::Number)
- )(i)
+ preceded(peek(one_of("0123456789")), map(double, Token::Number))(i)
}
fn lexpr<'a>(i: &'a str) -> IResult<&'a str, Token, (&'a str, ErrorKind)> {
-
delimited(
- multispace0,
- alt((number, func, var, negpos, lparen)),
- multispace0
+ multispace0,
+ alt((number, func, var, negpos, lparen)),
+ multispace0,
)(i)
}
-
fn after_rexpr<'a>(i: &'a str) -> IResult<&'a str, Token, (&'a str, ErrorKind)> {
-
- delimited(
- multispace0,
- alt((fact, binop, rparen)),
- multispace0
- )(i)
+ delimited(multispace0, alt((fact, binop, rparen)), multispace0)(i)
}
fn after_rexpr_no_paren<'a>(i: &'a str) -> IResult<&'a str, Token, (&'a str, ErrorKind)> {
-
- delimited(
- multispace0,
- alt((fact, binop)),
- multispace0
- )(i)
+ delimited(multispace0, alt((fact, binop)), multispace0)(i)
}
fn after_rexpr_comma<'a>(i: &'a str) -> IResult<&'a str, Token, (&'a str, ErrorKind)> {
-
- delimited(
- multispace0,
- alt((fact, binop, rparen, comma)),
- multispace0
- )(i)
+ delimited(multispace0, alt((fact, binop, rparen, comma)), multispace0)(i)
}
#[derive(Debug, Clone, Copy)]
@@ -264,7 +235,6 @@ enum ParenState {
Func,
}
-
/// Tokenize a given mathematical expression.
///
/// The parser should return `Ok` only if the expression is well-formed.
@@ -282,7 +252,6 @@ pub fn tokenize(input: &str) -> Result<Vec<Token>, TokenParseError> {
let mut s = input;
while !s.is_empty() {
-
let r = match (state, paren_stack.last()) {
(TokenizerState::LExpr, _) => lexpr(s),
(TokenizerState::AfterRExpr, None) => after_rexpr_no_paren(s),
@@ -292,7 +261,6 @@ pub fn tokenize(input: &str) -> Result<Vec<Token>, TokenParseError> {
match r {
Ok((rest, t)) => {
-
match t {
Token::LParen => {
paren_stack.push(ParenState::Subexpr);
@@ -315,11 +283,10 @@ pub fn tokenize(input: &str) -> Result<Vec<Token>, TokenParseError> {
s = rest;
}
Err(e) => {
-
match e {
Err::Error((value, _)) => {
return Err(TokenParseError::UnexpectedStrToken(value.to_string()));
- },
+ }
_ => (),
}
@@ -330,16 +297,16 @@ pub fn tokenize(input: &str) -> Result<Vec<Token>, TokenParseError> {
// return Err(TokenParseError::UnexpectedToken(i));
// }
_ => {
- panic!("Unexpected parse result when parsing `{}` at `{}`: {:?}", input, s, r);
+ panic!(
+ "Unexpected parse result when parsing `{}` at `{}`: {:?}",
+ input, s, r
+ );
}
}
-
}
match state {
- TokenizerState::LExpr => {
- Err(TokenParseError::MissingArgument)
- },
+ TokenizerState::LExpr => Err(TokenParseError::MissingArgument),
_ => {
if !paren_stack.is_empty() {
@@ -349,13 +316,8 @@ pub fn tokenize(input: &str) -> Result<Vec<Token>, TokenParseError> {
Ok(res)
}
}
-
-
}
-
-
-
// ok rest ["+(3--2) "] t Number(2.0)
// ok rest ["(3--2) "] t Binary(Plus)
// ok rest [51, 45, 45, 50, 41, 32] t LParen
@@ -368,134 +330,42 @@ pub fn tokenize(input: &str) -> Result<Vec<Token>, TokenParseError> {
// paren_stack: []
// Ok([Number(2.0), Binary(Plus), LParen, Number(3.0), Binary(Minus), Unary(Minus), Number(2.0), RParen])
-
#[cfg(test)]
mod tests {
use super::*;
-
+
#[test]
fn it_works() {
-
- assert_eq!(
- binop("+"),
- Ok(("", Token::Binary(Operation::Plus)))
- );
- assert_eq!(
- ident("abc32"),
- Ok(("", "abc32"))
- );
- assert_eq!(
- func("abc("),
- Ok(("", Token::Func("abc".into(), None)))
- );
- assert_eq!(
- func("abc ("),
- Ok(("", Token::Func("abc".into(), None)))
- );
- assert_eq!(
- var("abc"),
- Ok(("", Token::Var("abc".into())))
- );
- assert_eq!(
- fact("!"),
- Ok(("", Token::Unary(Operation::Fact)))
- );
- assert_eq!(
- negpos_s("+"),
- Ok(("", "+"))
- );
- assert_eq!(
- negpos_s("-"),
- Ok(("", "-"))
- );
- assert_eq!(
- negpos_s("+362"),
- Ok(("362", "+"))
- );
- assert_eq!(
- negpos_s("-5734"),
- Ok(("5734", "-"))
- );
- assert_eq!(
- negpos("+"),
- Ok(("", Token::Unary(Operation::Plus)))
- );
- assert_eq!(
- negpos("-"),
- Ok(("", Token::Unary(Operation::Minus)))
- );
- assert_eq!(
- negpos("+642"),
- Ok(("642", Token::Unary(Operation::Plus)))
- );
- assert_eq!(
- negpos("-563"),
- Ok(("563", Token::Unary(Operation::Minus)))
- );
- assert_eq!(
- lparen("("),
- Ok(("", Token::LParen))
- );
- assert_eq!(
- rparen(")"),
- Ok(("", Token::RParen))
- );
- assert_eq!(
- comma(","),
- Ok(("", Token::Comma))
- );
- assert_eq!(
- comma(","),
- Ok(("", Token::Comma))
- );
- assert_eq!(
- number("+1.34e2"),
- Ok(("", Token::Number(134.0)))
- );
- assert_eq!(
- number("+1.34e+2"),
- Ok(("", Token::Number(134.0)))
- );
- assert_eq!(
- number("3E+2"),
- Ok(("", Token::Number(300.0)))
- );
- assert_eq!(
- number("+4E+2"),
- Ok(("", Token::Number(400.0)))
- );
- assert_eq!(
- number("-4.76E+2"),
- Ok(("", Token::Number(-476.0)))
- );
- assert_eq!(
- number("-4.76"),
- Ok(("", Token::Number(-4.76)))
- );
- assert_eq!(
- number("+4.76"),
- Ok(("", Token::Number(4.76)))
- );
- assert_eq!(
- number("1.1"),
- Ok(("", Token::Number(1.1)))
- );
- assert_eq!(
- number("-1.1"),
- Ok(("", Token::Number(-1.1)))
- );
- assert_eq!(
- number("123E-02"),
- Ok(("", Token::Number(1.23)))
- );
- assert_eq!(
- number("+123E-02"),
- Ok(("", Token::Number(1.23)))
- );
- assert_eq!(
- number("-123E-02"),
- Ok(("", Token::Number(-1.23)))
- );
+ assert_eq!(binop("+"), Ok(("", Token::Binary(Operation::Plus))));
+ assert_eq!(ident("abc32"), Ok(("", "abc32")));
+ assert_eq!(func("abc("), Ok(("", Token::Func("abc".into(), None))));
+ assert_eq!(func("abc ("), Ok(("", Token::Func("abc".into(), None))));
+ assert_eq!(var("abc"), Ok(("", Token::Var("abc".into()))));
+ assert_eq!(fact("!"), Ok(("", Token::Unary(Operation::Fact))));
+ assert_eq!(negpos_s("+"), Ok(("", "+")));
+ assert_eq!(negpos_s("-"), Ok(("", "-")));
+ assert_eq!(negpos_s("+362"), Ok(("362", "+")));
+ assert_eq!(negpos_s("-5734"), Ok(("5734", "-")));
+ assert_eq!(negpos("+"), Ok(("", Token::Unary(Operation::Plus))));
+ assert_eq!(negpos("-"), Ok(("", Token::Unary(Operation::Minus))));
+ assert_eq!(negpos("+642"), Ok(("642", Token::Unary(Operation::Plus))));
+ assert_eq!(negpos("-563"), Ok(("563", Token::Unary(Operation::Minus))));
+ assert_eq!(lparen("("), Ok(("", Token::LParen)));
+ assert_eq!(rparen(")"), Ok(("", Token::RParen)));
+ assert_eq!(comma(","), Ok(("", Token::Comma)));
+ assert_eq!(comma(","), Ok(("", Token::Comma)));
+ assert_eq!(number("+1.34e2"), Ok(("", Token::Number(134.0))));
+ assert_eq!(number("+1.34e+2"), Ok(("", Token::Number(134.0))));
+ assert_eq!(number("3E+2"), Ok(("", Token::Number(300.0))));
+ assert_eq!(number("+4E+2"), Ok(("", Token::Number(400.0))));
+ assert_eq!(number("-4.76E+2"), Ok(("", Token::Number(-476.0))));
+ assert_eq!(number("-4.76"), Ok(("", Token::Number(-4.76))));
+ assert_eq!(number("+4.76"), Ok(("", Token::Number(4.76))));
+ assert_eq!(number("1.1"), Ok(("", Token::Number(1.1))));
+ assert_eq!(number("-1.1"), Ok(("", Token::Number(-1.1))));
+ assert_eq!(number("123E-02"), Ok(("", Token::Number(1.23))));
+ assert_eq!(number("+123E-02"), Ok(("", Token::Number(1.23))));
+ assert_eq!(number("-123E-02"), Ok(("", Token::Number(-1.23))));
assert_eq!(
number("abc"),
Err(Err::Error(("abc", nom::error::ErrorKind::Float)))
@@ -504,38 +374,21 @@ mod tests {
#[test]
fn test_lexpr() {
-
// number, func, var, negpos, lparen
assert_eq!(
number("a"),
Err(Err::Error(("a", nom::error::ErrorKind::Float)))
);
- assert_eq!(
- func("a"),
- Err(Err::Error(("", nom::error::ErrorKind::Tag)))
- );
+ assert_eq!(func("a"), Err(Err::Error(("", nom::error::ErrorKind::Tag))));
- assert_eq!(
- var("a"),
- Ok(("", Token::Var("a".into())))
- );
+ assert_eq!(var("a"), Ok(("", Token::Var("a".into()))));
- assert_eq!(
- lexpr("a"),
- Ok(("", Token::Var("a".into())))
- );
+ assert_eq!(lexpr("a"), Ok(("", Token::Var("a".into()))));
- assert_eq!(
- lexpr("2+"),
- Ok(("+", Token::Number(2.0)))
- );
+ assert_eq!(lexpr("2+"), Ok(("+", Token::Number(2.0))));
- assert_eq!(
- lexpr("2 +(3--2) "),
- Ok(("+(3--2) ", Token::Number(2.0)))
- );
-
+ assert_eq!(lexpr("2 +(3--2) "), Ok(("+(3--2) ", Token::Number(2.0))));
println!("{:?}", number("+(3--2) "));
@@ -543,29 +396,24 @@ mod tests {
lexpr("+(3--2) "),
Ok(("+(3--2) ", Token::Binary(Operation::Plus)))
);
-
}
#[test]
fn test_var() {
for &s in ["abc", "U0", "_034", "a_be45EA", "aAzZ_"].iter() {
- assert_eq!(
- var(s),
- Ok(("", Token::Var(s.into())))
- );
+ assert_eq!(var(s), Ok(("", Token::Var(s.into()))));
}
assert_eq!(var(""), Err(Err::Error(("", nom::error::ErrorKind::OneOf))));
- assert_eq!(var("0"), Err(Err::Error(("0", nom::error::ErrorKind::OneOf))));
+ assert_eq!(
+ var("0"),
+ Err(Err::Error(("0", nom::error::ErrorKind::OneOf)))
+ );
}
#[test]
fn test_number() {
-
- assert_eq!(
- number("45"),
- Ok(("", Token::Number(45.0)))
- );
+ assert_eq!(number("45"), Ok(("", Token::Number(45.0))));
assert_eq!(
number("+(3--2) "),
@@ -590,34 +438,13 @@ mod tests {
number("(3) - (2) "),
Err(Err::Error(("(3) - (2) ", nom::error::ErrorKind::OneOf)))
);
- assert_eq!(
- number("32143"),
- Ok(("", Token::Number(32143f64)))
- );
- assert_eq!(
- number("2."),
- Ok(("", Token::Number(2.0f64)))
- );
- assert_eq!(
- number("32143.25"),
- Ok(("", Token::Number(32143.25f64)))
- );
- assert_eq!(
- number("0.125e9"),
- Ok(("", Token::Number(0.125e9f64)))
- );
- assert_eq!(
- number("20.5E-3"),
- Ok(("", Token::Number(20.5E-3f64)))
- );
- assert_eq!(
- number("123423e+50"),
- Ok(("", Token::Number(123423e+50f64)))
- );
- assert_eq!(
- number("0.2"),
- Ok(("", Token::Number(0.2)))
- );
+ assert_eq!(number("32143"), Ok(("", Token::Number(32143f64))));
+ assert_eq!(number("2."), Ok(("", Token::Number(2.0f64))));
+ assert_eq!(number("32143.25"), Ok(("", Token::Number(32143.25f64))));
+ assert_eq!(number("0.125e9"), Ok(("", Token::Number(0.125e9f64))));
+ assert_eq!(number("20.5E-3"), Ok(("", Token::Number(20.5E-3f64))));
+ assert_eq!(number("123423e+50"), Ok(("", Token::Number(123423e+50f64))));
+ assert_eq!(number("0.2"), Ok(("", Token::Number(0.2))));
assert_eq!(
number(""),
Err(Err::Error(("", nom::error::ErrorKind::OneOf)))
@@ -661,9 +488,18 @@ mod tests {
);
}
- assert_eq!(func(""), Err(Err::Error(("", nom::error::ErrorKind::OneOf))));
- assert_eq!(func("("), Err(Err::Error(("(", nom::error::ErrorKind::OneOf))));
- assert_eq!(func("0("), Err(Err::Error(("0(", nom::error::ErrorKind::OneOf))));
+ assert_eq!(
+ func(""),
+ Err(Err::Error(("", nom::error::ErrorKind::OneOf)))
+ );
+ assert_eq!(
+ func("("),
+ Err(Err::Error(("(", nom::error::ErrorKind::OneOf)))
+ );
+ assert_eq!(
+ func("0("),
+ Err(Err::Error(("0(", nom::error::ErrorKind::OneOf)))
+ );
}
#[test]
@@ -752,15 +588,30 @@ mod tests {
])
);
- assert_eq!(tokenize("!3"), Err(TokenParseError::UnexpectedStrToken("!3".to_string())));
+ assert_eq!(
+ tokenize("!3"),
+ Err(TokenParseError::UnexpectedStrToken("!3".to_string()))
+ );
- assert_eq!(tokenize("()"), Err(TokenParseError::UnexpectedStrToken(")".to_string())));
+ assert_eq!(
+ tokenize("()"),
+ Err(TokenParseError::UnexpectedStrToken(")".to_string()))
+ );
assert_eq!(tokenize(""), Err(TokenParseError::MissingArgument));
- assert_eq!(tokenize("2)"), Err(TokenParseError::UnexpectedStrToken(")".to_string())));
+ assert_eq!(
+ tokenize("2)"),
+ Err(TokenParseError::UnexpectedStrToken(")".to_string()))
+ );
assert_eq!(tokenize("2^"), Err(TokenParseError::MissingArgument));
assert_eq!(tokenize("(((2)"), Err(TokenParseError::MissingRParen(2)));
- assert_eq!(tokenize("f(2,)"), Err(TokenParseError::UnexpectedStrToken(")".to_string())));
- assert_eq!(tokenize("f(,2)"), Err(TokenParseError::UnexpectedStrToken(",2)".to_string())));
+ assert_eq!(
+ tokenize("f(2,)"),
+ Err(TokenParseError::UnexpectedStrToken(")".to_string()))
+ );
+ assert_eq!(
+ tokenize("f(,2)"),
+ Err(TokenParseError::UnexpectedStrToken(",2)".to_string()))
+ );
}
-}
\ No newline at end of file
+}
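
The commented trace kept just above the test module corresponds to the check below; a sketch written as if it lived inside that same `mod tests`, where `tokenize`, `Token` and `Operation` are in scope through `use super::*;`:

    // Sketch only: same input/output as the commented trace in tokenizer.rs.
    #[test]
    fn trace_example() {
        let tokens = tokenize("2 +(3--2) ").unwrap();
        assert_eq!(
            tokens,
            vec![
                Token::Number(2.0),
                Token::Binary(Operation::Plus),
                Token::LParen,
                Token::Number(3.0),
                Token::Binary(Operation::Minus),
                Token::Unary(Operation::Minus),
                Token::Number(2.0),
                Token::RParen,
            ]
        );
    }
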

From f6b868d04aa3e034f51dfd322e66c75b6d413e5f Mon Sep 17 00:00:00 2001
From: Simon Gardling <titaniumtown@gmail.com>
Date: Wed, 16 Feb 2022 15:03:07 -0500
Subject: [PATCH 07/10] add 'log' function (does the same thing as log10)

---
src/expr.rs | 1 +
1 file changed, 1 insertion(+)

diff --git a/src/expr.rs b/src/expr.rs
index 04b54b5..ad29be0 100644
--- a/src/expr.rs
+++ b/src/expr.rs
@@ -723,6 +723,7 @@ impl<'a> Context<'a> {
ctx.func("exp", f64::exp);
ctx.func("ln", f64::ln);
ctx.func("log10", f64::log10);
+ ctx.func("log", f64::log10);
ctx.func("abs", f64::abs);
ctx.func("sin", f64::sin);
ctx.func("cos", f64::cos);
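
Assuming this fork keeps upstream meval's `eval_str` entry point and its default `Context`, the alias registered above makes the two spellings interchangeable; a sketch:

    // Sketch only: `log` now resolves to f64::log10 in the default Context.
    fn main() {
        let a = meval::eval_str("log(1000)").unwrap();
        let b = meval::eval_str("log10(1000)").unwrap();
        assert_eq!(a, b); // both call f64::log10
        assert!((a - 3.0).abs() < 1e-12);
    }
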

From 00cd188a9a2b98a01b686e03ed9c375b2760a0b0 Mon Sep 17 00:00:00 2001
From: Simon Gardling <titaniumtown@gmail.com>
Date: Thu, 17 Feb 2022 11:24:00 -0500
Subject: [PATCH 08/10] update README.md

---
README.md | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/README.md b/README.md
index 5571842..22d4633 100644
--- a/README.md
+++ b/README.md
@@ -104,7 +104,7 @@ supported:
- functions implemented using functions of the same name in [Rust std library][std-float]:
- `sqrt`, `abs`
- - `exp`, `ln`, `log10`
+ - `exp`, `ln`, `log10` (`log10` can also be called as `log`)
- `sin`, `cos`, `tan`, `asin`, `acos`, `atan`, `atan2`
- `sinh`, `cosh`, `tanh`, `asinh`, `acosh`, `atanh`
- `floor`, `ceil`, `round`

From 115b1ef88d57ac827f8f62eff97e026455c32791 Mon Sep 17 00:00:00 2001
From: Simon Gardling <titaniumtown@gmail.com>
Date: Tue, 22 Feb 2022 10:17:50 -0500
Subject: [PATCH 09/10] don't specify specific serde version

---
Cargo.toml | 8 ++++----
1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/Cargo.toml b/Cargo.toml
index 60b4064..8d62d0d 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -14,13 +14,13 @@ exclude = ["README.tpl", ".travis.yml"]
[dependencies]
fnv = "1.0.7"
nom = "7.1.0"
-serde = { version = "1.0.136", optional = true }
+serde = { version = "1", optional = true }
[dev-dependencies]
gnuplot = "0.0.37"
-serde_test = "1.0.136"
-serde_derive = "1.0.136"
-serde_json = "1.0.79"
+serde_test = "1"
+serde_derive = "1"
+serde_json = "1"
toml = "0.5.8"
[features]

From 6bf579fd402928745cf4f24e5c975bece3285179 Mon Sep 17 00:00:00 2001
From: Simon Gardling <titaniumtown@gmail.com>
Date: Thu, 3 Mar 2022 10:32:47 -0500
Subject: [PATCH 10/10] fix variable naming

---
src/tokenizer.rs | 8 ++++----
1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/src/tokenizer.rs b/src/tokenizer.rs
index 190fa51..9dfd304 100644
--- a/src/tokenizer.rs
+++ b/src/tokenizer.rs
@@ -168,8 +168,8 @@ fn fact<'a>(i: &'a str) -> IResult<&'a str, Token, (&'a str, ErrorKind)> {
}
fn ident<'a>(i: &'a str) -> IResult<&'a str, &'a str, (&'a str, ErrorKind)> {
- let REMAINING_CHARS: &str = "_abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789";
- let FIRST_CHARS: &str = "_abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ";
+ let remaining_chars: &str = "_abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789";
+ let first_chars: &str = "_abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ";
// Returns whole strings matched by the given parser.
recognize(
@@ -177,9 +177,9 @@ fn ident<'a>(i: &'a str) -> IResult<&'a str, &'a str, (&'a str, ErrorKind)> {
// Note that returned ok value of `preceded()` is ignored by `recognize()`.
preceded(
// Parses a single character contained in the given string.
- one_of(FIRST_CHARS),
+ one_of(first_chars),
// Parses the longest slice consisting of the given characters
- opt(is_a(REMAINING_CHARS)),
+ opt(is_a(remaining_chars)),
),
)(i)
}
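
The rename above is purely cosmetic: `ident` still accepts a leading letter or underscore followed by any mix of letters, digits and underscores. A sketch of that shape, written as if inside the tokenizer's own test module (where the private `ident` is reachable through `use super::*;`):

    // Sketch only: accepted identifier shape, unchanged by the rename.
    #[test]
    fn ident_shape() {
        assert_eq!(ident("a_be45EA"), Ok(("", "a_be45EA")));
        assert_eq!(ident("x+1"), Ok(("+1", "x"))); // stops at the first non-identifier char
        assert!(ident("0abc").is_err()); // may not start with a digit
    }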