ci: fixing typos programmatically (#5975)

## Description

Adds a CI check for typos and fixes all* typos in code, comments, and
docs.

*found by [typos-cli](https://github.com/crate-ci/typos)

`typos` doesn't catch everything, but it seems to work better than
codespell and cargo-spellcheck (fewer false positives).
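
For anyone who wants to reproduce or extend the fixes locally, a minimal sketch (assuming a recent `typos-cli` release; check `typos --help` for the exact flags on your version):

```sh
# Install the checker (prebuilt binaries are also available on its releases page).
cargo install typos-cli

# Report typos using the repo config added in this PR.
typos --config .typos.toml

# Apply the suggested fixes in place.
typos --config .typos.toml --write-changes
```

The CI side is just the `crate-ci/typos` action pointed at the same `.typos.toml`, so local runs and the new `Spellcheck` workflow below should agree.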

## Checklist

- [ ] I have linked to any relevant issues.
- [ ] I have commented my code, particularly in hard-to-understand
areas.
- [ ] I have updated the documentation where relevant (API docs, the
reference, and the Sway book).
- [ ] If my change requires substantial documentation changes, I have
[requested support from the DevRel
team](https://github.com/FuelLabs/devrel-requests/issues/new/choose)
- [ ] I have added tests that prove my fix is effective or that my
feature works.
- [ ] I have added (or requested a maintainer to add) the necessary
`Breaking*` or `New Feature` labels where relevant.
- [ ] I have done my best to ensure that my PR adheres to [the Fuel Labs
Code Review
Standards](https://github.com/FuelLabs/rfcs/blob/master/text/code-standards/external-contributors.md).
- [ ] I have requested a review from the relevant team or maintainers.
Sophie Dankel 2024-05-08 17:21:32 -07:00 committed by GitHub
parent 4a63b41de1
commit f8e8d356be
79 changed files with 139 additions and 114 deletions

.github/workflows/spellcheck.yml (new file)
@@ -0,0 +1,16 @@
+name: Spellcheck
+on:
+  pull_request:
+jobs:
+  find-typos:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v3
+      - name: Check spelling
+        uses: crate-ci/typos@master
+        with:
+          config: .typos.toml

.typos.toml (new file)
@@ -0,0 +1,9 @@
+[default]
+extend-ignore-identifiers-re = [
+    "var*",
+    "ba",
+    "TRO",
+    "tro",
+    "Tro",
+    "Nam",
+]

@@ -35,20 +35,20 @@ forc_path="$SCRIPT_DIR/target/$build_type/forc"
 # prepare the benchmark data for commit if requested
 if $PREPARE_FOR_COMMIT; then
-sway_peformance_data_dir=performance-data
+sway_performance_data_dir=performance-data
 sway_performance_data_repo_url=git@github.com:FuelLabs/sway-performance-data.git
-if [ ! -d "$SCRIPT_DIR/$sway_peformance_data_dir" ]; then
-echo "Directory $sway_peformance_data_dir not found. Cloning the repository..."
-git clone "$sway_performance_data_repo_url" "$sway_peformance_data_dir"
-echo "Repository cloned into $sway_peformance_data_dir."
+if [ ! -d "$SCRIPT_DIR/$sway_performance_data_dir" ]; then
+echo "Directory $sway_performance_data_dir not found. Cloning the repository..."
+git clone "$sway_performance_data_repo_url" "$sway_performance_data_dir"
+echo "Repository cloned into $sway_performance_data_dir."
 else
 echo "Updating sway-performance-data repository..."
-git -C "$SCRIPT_DIR/$sway_peformance_data_dir" pull
+git -C "$SCRIPT_DIR/$sway_performance_data_dir" pull
 fi
-mkdir -p "$SCRIPT_DIR/$sway_peformance_data_dir/$GITHUB_SHA"
-cp -r $benchmarks_dir/* "$SCRIPT_DIR/$sway_peformance_data_dir/$GITHUB_SHA"
+mkdir -p "$SCRIPT_DIR/$sway_performance_data_dir/$GITHUB_SHA"
+cp -r $benchmarks_dir/* "$SCRIPT_DIR/$sway_performance_data_dir/$GITHUB_SHA"
 else
 sway_libs_dir=sway-libs
 sway_libs_repo_url=https://github.com/FuelLabs/sway-libs.git

@@ -459,7 +459,7 @@ The optimization passes are organized as different pass groups inside a [`PassMa
 pass_group.append_pass(RETDEMOTION_NAME);
 pass_group.append_pass(MISCDEMOTION_NAME);
-// Convert loads and stores to mem_copys where possible.
+// Convert loads and stores to mem_copies where possible.
 pass_group.append_pass(MEMCPYOPT_NAME);
 // Run a DCE and simplify-cfg to clean up any obsolete instructions.

@@ -18,7 +18,7 @@ impl Owner {
 enum Role {
 FullAccess: (),
-PartialAcess: (),
+PartialAccess: (),
 NoAccess: (),
 }
 // ANCHOR_END: data_structures

@@ -16,7 +16,7 @@ If the condition evaluates to the [Boolean](../built-ins/boolean.md) value of `t
 On the other hand if the condition evaluates to `false` then we check the next condition, in this case if the `number` is divisible by `4`. We can have as many `else if` checks as we like as long as they evaluate to a Boolean.
-At the end there is a special case which is known as a `catch all` case i.e. the `else`. What this means is that we have gone through all of our conditional checks above and none of them have been met. In this scenario we may want to have some special logic to handle a generic case which encompases all the other conditions which we do not care about or can be treated in the same way.
+At the end there is a special case which is known as a `catch all` case i.e. the `else`. What this means is that we have gone through all of our conditional checks above and none of them have been met. In this scenario we may want to have some special logic to handle a generic case which encompasses all the other conditions which we do not care about or can be treated in the same way.
 ## Using if & let together

@@ -10,6 +10,6 @@ There are two functions that can be used to mint:
 - [`mint()`](./mint.md)
 - [`mint_to()`](./address-or-contract.md)
-Specific implementation details on transfering assets to addresses can be found [here](./address.md).
+Specific implementation details on transferring assets to addresses can be found [here](./address.md).
-Specific implementation details on transfering assets to contracts can be found [here](./contract.md).
+Specific implementation details on transferring assets to contracts can be found [here](./contract.md).

@@ -6,6 +6,6 @@ There is one function that can be used to transfer an asset to any entity:
 - [`transfer()`](./address-or-contract.md)
-Specific implementation details on transfering assets to addresses can be found [here](./address.md).
+Specific implementation details on transferring assets to addresses can be found [here](./address.md).
-Specific implementation details on transfering assets to contracts can be found [here](./contract.md).
+Specific implementation details on transferring assets to contracts can be found [here](./contract.md).

@@ -21,7 +21,7 @@ We'll begin by defining the `Owner` & `Role` data structures and implement a `de
 {{#include ../../../code/operations/storage/storage_init/src/main.sw:data_structures}}
 ```
-Now that we have our data structures we'll keep track of how many `current_owners` we have and declare the owner in the two aformentioned styles.
+Now that we have our data structures we'll keep track of how many `current_owners` we have and declare the owner in the two aforementioned styles.
 ```sway
 {{#include ../../../code/operations/storage/storage_init/src/main.sw:initialization}}

@@ -1,6 +1,6 @@
 # StorageVec
-A `StorageVec` is a vector that permanently stores its data in `storage`. It replicates the functionality of a regular vector however its data is not stored contigiously because it utilizes hashing and [generics](../../../language/generics/index.md) to find a location to store the value `T`.
+A `StorageVec` is a vector that permanently stores its data in `storage`. It replicates the functionality of a regular vector however its data is not stored contiguously because it utilizes hashing and [generics](../../../language/generics/index.md) to find a location to store the value `T`.
 There is a number of methods in the [standard library](https://github.com/FuelLabs/sway/blob/master/sway-lib-std/src/storage.sw) however we will take a look at pushing and retrieving data.

@@ -1,7 +1,7 @@
 library;
 fn tuple() {
-// You can declare the types youself
+// You can declare the types yourself
 let tuple1: (u8, bool, u64) = (100, false, 10000);
 // Or have the types be inferred

@@ -437,7 +437,7 @@ impl PackageManifestFile {
 pkg_dir.pop();
 if let Some(nested_package) = find_nested_manifest_dir(&pkg_dir) {
 // remove file name from nested_package_manifest
-bail!("Nested packages are not supported, please consider seperating the nested package at {} from the package at {}, or if it makes sense consider creating a workspace.", nested_package.display(), pkg_dir.display())
+bail!("Nested packages are not supported, please consider separating the nested package at {} from the package at {}, or if it makes sense consider creating a workspace.", nested_package.display(), pkg_dir.display())
 }
 Ok(())
 }
@@ -875,7 +875,7 @@ impl GenericManifestFile for WorkspaceManifestFile {
 // package manifest as a workspace manifest), look into the parent directories for a
 // legitimate workspace manifest. If the error returned is something else this is a
 // workspace manifest with errors, classify this as a workspace manifest but with
-// errors so that the erros will be displayed to the user.
+// errors so that the errors will be displayed to the user.
 Self::from_file(possible_path)
 .err()
 .map(|e| !e.to_string().contains("missing field `workspace`"))
@@ -956,7 +956,7 @@ impl WorkspaceManifest {
 }
 // Check for duplicate pkg name entries in member manifests of this workspace.
-let duplciate_pkg_lines = pkg_name_to_paths
+let duplicate_pkg_lines = pkg_name_to_paths
 .iter()
 .filter_map(|(pkg_name, paths)| {
 if paths.len() > 1 {
@@ -970,8 +970,8 @@
 })
 .collect::<Vec<_>>();
-if !duplciate_pkg_lines.is_empty() {
-let error_message = duplciate_pkg_lines.join("\n");
+if !duplicate_pkg_lines.is_empty() {
+let error_message = duplicate_pkg_lines.join("\n");
 bail!(
 "Duplicate package names detected in the workspace:\n\n{}",
 error_message

@@ -384,7 +384,7 @@ impl MemberFilter {
 let pkg = &graph[node_ix];
 let pkg_manifest = &manifest_map[&pkg.id()];
 let program_type = pkg_manifest.program_type();
-// Since parser cannot recover for program type detection, for the scenerios that
+// Since parser cannot recover for program type detection, for the scenarios that
 // parser fails to parse the code, program type detection is not possible. So in
 // failing to parse cases we should try to build at least until
 // https://github.com/FuelLabs/sway/issues/3017 is fixed. Until then we should
@@ -1012,7 +1012,7 @@ fn validate_dep(
 Ok(dep_manifest)
 }
-/// Part of dependency validation, any checks related to the depenency's manifest content.
+/// Part of dependency validation, any checks related to the dependency's manifest content.
 fn validate_dep_manifest(
 dep: &Pinned,
 dep_manifest: &PackageManifestFile,
@@ -1242,7 +1242,7 @@ fn graph_to_manifest_map(manifests: &MemberManifestFiles, graph: &Graph) -> Resu
 /// Assumes the given `graph` only contains valid dependencies (see `validate_graph`).
 ///
 /// `pkg_graph_to_manifest_map` starts from each node (which corresponds to the given proj_manifest)
-/// and visits childs to collect their manifest files.
+/// and visits children to collect their manifest files.
 fn pkg_graph_to_manifest_map(
 manifests: &MemberManifestFiles,
 pkg_name: &str,
@@ -2325,7 +2325,7 @@ pub fn build(
 let is_contract_dependency = is_contract_dependency(plan.graph(), node);
 // If we are building a contract and tests are enabled or we are building a contract
-// dependency, we need the tests exlcuded bytecode.
+// dependency, we need the tests excluded bytecode.
 let bytecode_without_tests = if (include_tests
 && matches!(manifest.program_type(), Ok(TreeType::Contract)))
 || is_contract_dependency

@@ -9,7 +9,7 @@ pub(crate) struct AuthHandler {
 impl AuthHandler {
 /// Creates a new `AuthHandler` from all fields of the struct. If there are no specific reasons
-/// not to, `default_with_config` should be prefered.
+/// not to, `default_with_config` should be preferred.
 fn new(
 config: git2::Config,
 ssh_authentication_attempt: bool,

@@ -67,7 +67,7 @@ const DEFAULT_REMOTE_NAME: &str = "origin";
 /// Everything needed to recognize a checkout in offline mode
 ///
-/// Since we are omiting `.git` folder to save disk space, we need an indexing file
+/// Since we are omitting `.git` folder to save disk space, we need an indexing file
 /// to recognize a checkout while searching local checkouts in offline mode
 #[derive(Serialize, Deserialize)]
 pub struct SourceIndex {
@@ -377,7 +377,7 @@ fn tmp_git_repo_dir(fetch_id: u64, name: &str, repo: &Url) -> PathBuf {
 git_checkouts_directory().join("tmp").join(repo_dir_name)
 }
-/// Given a git reference, build a list of `refspecs` required for the fetch opration.
+/// Given a git reference, build a list of `refspecs` required for the fetch operation.
 ///
 /// Also returns whether or not our reference implies we require fetching tags.
 fn git_ref_to_refspecs(reference: &Reference) -> (Vec<String>, bool) {
@@ -508,7 +508,7 @@ pub fn commit_path(name: &str, repo: &Url, commit_hash: &str) -> PathBuf {
 ///
 /// Returns the location of the checked out commit.
 ///
-/// NOTE: This function assumes that the caller has aquired an advisory lock to co-ordinate access
+/// NOTE: This function assumes that the caller has acquired an advisory lock to co-ordinate access
 /// to the git repository checkout path.
 pub fn fetch(fetch_id: u64, name: &str, pinned: &Pinned) -> Result<PathBuf> {
 let path = commit_path(name, &pinned.source.repo, &pinned.commit_hash);

@@ -298,7 +298,7 @@ impl Pinned {
 /// If the source is associated with a specific semver version, emit it.
 ///
-/// Used soley for the package lock file.
+/// Used solely for the package lock file.
 pub fn semver(&self) -> Option<semver::Version> {
 match self {
 Self::Registry(reg) => Some(reg.source.version.clone()),

@@ -41,7 +41,7 @@ pub struct Command {
 #[clap(long)]
 pub salt: Option<Vec<String>>,
 /// Generate a default salt (0x0000000000000000000000000000000000000000000000000000000000000000) for the contract.
-/// Useful for CI, to create reproducable deployments.
+/// Useful for CI, to create reproducible deployments.
 #[clap(long)]
 pub default_salt: bool,
 #[clap(flatten)]
@@ -64,7 +64,7 @@ pub struct Command {
 ///
 /// By default, storage slots are initialized with the values defined in the storage block in
 /// the contract. You can override the initialization by providing the file path to a JSON file
-/// containing the overriden values.
+/// containing the overridden values.
 ///
 /// The file format and key values should match the compiler-generated `*-storage_slots.json` file in the output
 /// directory of the compiled contract.

@@ -284,7 +284,7 @@ impl HTMLString {
 }
 }
-/// The type of document. Helpful in detemining what to represent in
+/// The type of document. Helpful in determining what to represent in
 /// the sidebar & page content.
 #[derive(Clone, Ord, PartialOrd, Eq, PartialEq)]
 pub(crate) enum DocStyle {

@@ -69,7 +69,7 @@ pub(crate) fn create_preview(raw_attributes: Option<String>) -> Option<String> {
 }
 /// Checks if some raw html (rendered from markdown) contains a header.
-/// If it does, it splits at the header and returns the slice that preceeded it.
+/// If it does, it splits at the header and returns the slice that preceded it.
 pub(crate) fn split_at_markdown_header(raw_html: &str) -> &str {
 for header in HTML_HEADERS {
 if raw_html.contains(header) {

@@ -71,7 +71,7 @@ pub struct TestResult {
 pub state: vm::state::ProgramState,
 /// The required state of the VM for this test to pass.
 pub condition: pkg::TestPassCondition,
-/// Emitted `Recipt`s during the execution of the test.
+/// Emitted `Receipt`s during the execution of the test.
 pub logs: Vec<fuel_tx::Receipt>,
 /// Gas used while executing this test.
 pub gas_used: u64,
@@ -274,7 +274,7 @@ fn get_contract_dependency_map(
 let pinned_member = graph[member_node].clone();
 let contract_dependencies = build_plan
 .contract_dependencies(member_node)
-.map(|contract_depency_node_ix| graph[contract_depency_node_ix].clone())
+.map(|contract_dependency_node_ix| graph[contract_dependency_node_ix].clone())
 .filter_map(|pinned| built_members.get(&pinned))
 .cloned()
 .collect::<Vec<_>>();

@@ -150,7 +150,7 @@ mod test {
 #[test]
 fn test_fs_locking_same_process() {
 let x = PidFileLocking::lsp("test");
-assert!(!x.is_locked()); // checks the non-existance of the lock (therefore it is not locked)
+assert!(!x.is_locked()); // checks the non-existence of the lock (therefore it is not locked)
 assert!(x.lock().is_ok());
 // The current process is locking "test"
 let x = PidFileLocking::lsp("test");

@@ -122,7 +122,7 @@ pub(crate) enum AllocatedOpcode {
 AllocatedRegister,
 ),
-/* Conrol Flow Instructions */
+/* Control Flow Instructions */
 JMP(AllocatedRegister),
 JI(VirtualImmediate24),
 JNE(AllocatedRegister, AllocatedRegister, AllocatedRegister),

@@ -220,7 +220,7 @@ impl Op {
 }
 }
-/// Dymamically jumps to a register value.
+/// Dynamically jumps to a register value.
 pub(crate) fn jump_to_register(
 reg: VirtualRegister,
 comment: impl Into<String>,

@@ -164,7 +164,7 @@ impl<'cfg> ControlFlowGraph<'cfg> {
 true
 } else {
 // Consider variables declarations alive when count is greater than 1
-// This is explicilty required because the variable may be considered dead
+// This is explicitly required because the variable may be considered dead
 // when it is not connected from an entry point, while it may still be used by other dead code.
 connections_count
 .get(n)
@@ -182,7 +182,7 @@ impl<'cfg> ControlFlowGraph<'cfg> {
 true
 } else {
 // Consider param alive when count is greater than 1
-// This is explicilty required because the param may be considered dead
+// This is explicitly required because the param may be considered dead
 // when it is not connected from an entry point, while it may still be used by other dead code.
 connections_count
 .get(n)

@@ -276,7 +276,7 @@ pub(crate) fn compile_constants(
 // c) ditto for enums.
 //
 // And for structs and enums in particular, we must ignore those with embedded generic types as
-// they are monomorphised only at the instantation site. We must ignore the generic declarations
+// they are monomorphised only at the instantiation site. We must ignore the generic declarations
 // altogether anyway.
 fn compile_declarations(
 engines: &Engines,

@@ -109,7 +109,7 @@ impl TyStructDecl {
 /// within the struct memory layout, or `None` if the field with the
 /// name `field_name` does not exist.
 pub(crate) fn get_field_index_and_type(&self, field_name: &Ident) -> Option<(u64, TypeId)> {
-// TODO-MEMLAY: Warning! This implementation assumes that fields are layed out in
+// TODO-MEMLAY: Warning! This implementation assumes that fields are laid out in
 // memory in the order of their declaration.
 // This assumption can be changed in the future.
 self.fields

@@ -797,7 +797,7 @@ impl ReplaceDecls for TyExpressionVariant {
 // Finds method implementation for method dummy and replaces it.
 // This is required because dummy methods don't have type parameters from impl traits.
-// Thus we use the implementated method that already contains all the required type parameters,
+// Thus we use the implemented method that already contains all the required type parameters,
 // including those from the impl trait.
 if method.is_trait_method_dummy {
 if let Some(implementing_for_typeid) = method.implementing_for_typeid {

@@ -900,7 +900,7 @@ pub(crate) fn compile_ast_to_ir_to_asm(
 pass_group.append_pass(RETDEMOTION_NAME);
 pass_group.append_pass(MISCDEMOTION_NAME);
-// Convert loads and stores to mem_copys where possible.
+// Convert loads and stores to mem_copies where possible.
 pass_group.append_pass(MEMCPYOPT_NAME);
 // Run a DCE and simplify-cfg to clean up any obsolete instructions.

@@ -214,7 +214,7 @@ where
 let name = x.name.as_str();
 Some(match &*engines.te().get(x.type_argument.type_id) {
 // unit
-TypeInfo::Tuple(fiels) if fiels.is_empty() => {
+TypeInfo::Tuple(fields) if fields.is_empty() => {
 format!("{} => {}::{}, \n", x.tag, enum_name, name)
 },
 _ => {

@@ -440,7 +440,7 @@ impl TyImplTrait {
 let mut impl_trait = ty::TyImplTrait {
 impl_type_parameters: new_impl_type_parameters,
 trait_name,
-trait_type_arguments: vec![], // this is empty because impl selfs don't support generics on the "Self" trait,
+trait_type_arguments: vec![], // this is empty because impl self's don't support generics on the "Self" trait,
 trait_decl_ref: None,
 span: block_span,
 items: new_items,

@@ -223,7 +223,7 @@ where
 ///
 /// ---
 ///
-/// Note that becaues `Range<T>` relies on the assumption that `T` is an
+/// Note that because `Range<T>` relies on the assumption that `T` is an
 /// integer value, this algorithm joins `Range<T>`s that are within ± 1 of
 /// one another. Given these two `Range<T>`s:
 ///

@@ -49,7 +49,7 @@ pub(super) enum ReqOrVarDecl {
 /// The tree represents a logical expression that consists of equality comparisons, and
 /// lazy AND and OR operators.
 ///
-/// The leafs of the tree are either equality comparisons or eventual variable declarations
+/// The leaves of the tree are either equality comparisons or eventual variable declarations
 /// or none of those in the case of catch-all `_` pattern or only a single rest `..` in structs.
 pub(super) struct ReqDeclTree {
 root: ReqDeclNode,

@@ -434,7 +434,7 @@ pub(crate) fn type_check_method_application(
 method_name_expr: Expression,
 _caller: Expression,
 arguments: Vec<Expression>,
-typed_argumens: Vec<TypeId>,
+typed_arguments: Vec<TypeId>,
 coins_expr: Expression,
 asset_id_expr: Expression,
 gas_expr: Expression,
@@ -442,7 +442,7 @@
 let tuple_args_type_id = ctx.engines.te().insert(
 ctx.engines,
 TypeInfo::Tuple(
-typed_argumens
+typed_arguments
 .iter()
 .map(|&type_id| TypeArgument {
 type_id,

@@ -168,7 +168,7 @@ impl Module {
 &self.current_lexical_scope().items
 }
-/// The mutable collection of items declared by this module's curent lexical scope.
+/// The mutable collection of items declared by this module's current lexical scope.
 pub fn current_items_mut(&mut self) -> &mut Items {
 &mut self.current_lexical_scope_mut().items
 }

@@ -244,7 +244,7 @@ fn depends_on(
 (AstNodeContent::UseStatement(_), AstNodeContent::UseStatement(_)) => false,
 (_, AstNodeContent::UseStatement(_)) => true,
-// Then declarations, ordered using the dependecies list.
+// Then declarations, ordered using the dependencies list.
 (AstNodeContent::IncludeStatement(_), AstNodeContent::Declaration(_)) => false,
 (AstNodeContent::UseStatement(_), AstNodeContent::Declaration(_)) => false,
 (AstNodeContent::Declaration(dependant), AstNodeContent::Declaration(dependee)) => {

@@ -1736,7 +1736,7 @@ pub(crate) trait MonomorphizeHelper {
 /// }
 /// ```
 ///
-/// `EnforeTypeArguments` would require that the type annotations
+/// `EnforceTypeArguments` would require that the type annotations
 /// for `p1` and `p2` contain `<...>`. This is to avoid ambiguous definitions:
 ///
 /// ```ignore

@@ -2752,7 +2752,7 @@ fn path_expr_to_expression(
 && path_expr.suffix.is_empty()
 && path_expr.prefix.generics_opt.is_none()
 {
-// only `foo`, it coult either be a variable or an enum variant
+// only `foo`, it could either be a variable or an enum variant
 let name = path_expr_segment_to_ident(context, handler, &path_expr.prefix)?;
 Expression {

@@ -103,7 +103,7 @@ pub enum TypeInfo {
 name: Ident,
 // NOTE(Centril): Used to be BTreeSet; need to revert back later. Must be sorted!
 trait_constraints: VecSet<TraitConstraint>,
-// Methods can have type parameters with unkown generic that extend the trait constraints of a parent unkown generic.
+// Methods can have type parameters with unknown generic that extend the trait constraints of a parent unknown generic.
 parent: Option<TypeId>,
 // This is true when the UnknownGeneric is used in a type parameter.
 is_from_type_parameter: bool,

@@ -207,7 +207,7 @@ impl fmt::Display for Warning {
 ),
 UninitializedAsmRegShadowsVariable { name } => write!(
 f,
-"This unitialized register is shadowing a variable, you probably meant to also initialize it like \"{name}: {name}\"."
+"This uninitialized register is shadowing a variable, you probably meant to also initialize it like \"{name}: {name}\"."
 ),
 OverridingTraitImplementation => write!(
 f,
@@ -357,7 +357,7 @@ impl ToDiagnostic for CompileWarning {
 _ => Diagnostic {
 // TODO: Temporary we use self here to achieve backward compatibility.
 // In general, self must not be used and will not be used once we
-// switch to our own #[error] macro. All the values for the formating
+// switch to our own #[error] macro. All the values for the formatting
 // of a diagnostic must come from the enum variant parameters.
 issue: Issue::warning(source_engine, self.span(), format!("{}", self.warning_content)),
 ..Default::default()

@@ -68,7 +68,7 @@ impl<'eng> Context<'eng> {
 self.source_engine
 }
-/// Return an interator for every module in this context.
+/// Return an iterator for every module in this context.
 pub fn module_iter(&self) -> ModuleIterator {
 ModuleIterator::new(self)
 }

@@ -3,7 +3,7 @@
 //! Since Sway abstracts most low level operations behind traits they are translated into function
 //! calls which contain ASM blocks.
 //!
-//! Unfortuntely, using opaque ASM blocks limits the effectiveness of certain optimizations and
+//! Unfortunately, using opaque ASM blocks limits the effectiveness of certain optimizations and
 //! this should be addressed in the future, perhaps by using compiler intrinsic calls instead of
 //! the ASM blocks where possible. See: https://github.com/FuelLabs/sway/issues/855,

@@ -49,7 +49,7 @@ impl Module {
 context.modules[self.0].kind
 }
-/// Return an interator over each of the [`Function`]s in this module.
+/// Return an iterator over each of the [`Function`]s in this module.
 pub fn function_iter(&self, context: &Context) -> FunctionIterator {
 FunctionIterator::new(context, self)
 }

@@ -144,7 +144,7 @@ fn remove_dead_blocks(context: &mut Context, function: &Function) -> Result<bool
 }
 fn merge_blocks(context: &mut Context, function: &Function) -> Result<bool, IrError> {
-// Check if block branches soley to another block B, and that B has exactly one predecessor.
+// Check if block branches solely to another block B, and that B has exactly one predecessor.
 fn check_candidate(context: &Context, from_block: Block) -> Option<(Block, Block)> {
 from_block
 .get_terminator(context)

@@ -18,7 +18,7 @@ script {
 ret u64 v1
 }
-// Three instrutions.
+// Three instructions.
 fn less_three_blocks(b: bool) -> u64 {
 entry(b: bool):
 cbr b, then_block(), else_block()

@@ -207,7 +207,7 @@ pub fn global_gas() -> u64 {
 ///
 /// # Returns
 ///
-/// * [u64] - The remaining gas for the curren context.
+/// * [u64] - The remaining gas for the current context.
 ///
 /// # Examples
 ///

@@ -7,7 +7,7 @@ use lsp_types::CodeActionOrCommand;
 use self::auto_import::import_code_action;
 use self::qualify::qualify_code_action;
-/// Returns a list of [CodeActionOrCommand] based on the relavent compiler diagnostics.
+/// Returns a list of [CodeActionOrCommand] based on the relevant compiler diagnostics.
 pub(crate) fn code_actions(ctx: &CodeActionContext) -> Option<Vec<CodeActionOrCommand>> {
 // Find diagnostics that have attached metadata.
 let diagnostics_with_data = ctx.diagnostics.iter().filter_map(|diag| {

@@ -100,7 +100,7 @@ pub fn code_actions(
 }
 pub(crate) trait CodeAction<'a, T: Spanned> {
-/// Creates a new [`CodeAction`] with the given [Engines], delcaration type, and [Url].
+/// Creates a new [`CodeAction`] with the given [Engines], declaration type, and [Url].
 fn new(ctx: &CodeActionContext<'a>, decl: &'a T) -> Self;
 /// Returns a [String] of text to insert into the document.

@@ -69,7 +69,7 @@ pub fn rename(
 }
 let mut range = ident.range;
 if ident.is_raw_ident() {
-// Make sure the start char starts at the begining,
+// Make sure the start char starts at the beginning,
 // taking the r# tokens into account.
 range.start.character -= RAW_IDENTIFIER.len() as u32;
 }
@@ -154,7 +154,7 @@ fn is_token_in_workspace(
 .declared_token_ident(engines)
 .ok_or(RenameError::TokenNotFound)?;
-// Check the span of the tokens defintions to determine if it's in the users workspace.
+// Check the span of the tokens definitions to determine if it's in the users workspace.
 let temp_path = &session.sync.temp_dir()?;
 if let Some(path) = &decl_ident.path {
 if !path.starts_with(temp_path) {

@@ -119,7 +119,7 @@ pub fn mark_file_as_dirty(uri: &Url) -> Result<(), LanguageServerError> {
 .map_err(|e| DirectoryError::LspLocksDirFailed(e.to_string()))?)
 }
-/// Removes the corresponding flag file for the specifed Url.
+/// Removes the corresponding flag file for the specified Url.
 ///
 /// If the flag file does not exist, this function will do nothing.
 pub fn remove_dirty_flag(uri: &Url) -> Result<(), LanguageServerError> {

@@ -243,7 +243,7 @@ impl SyncWorkspace {
 }
 /// Check if the current path is part of the users workspace.
-/// Returns false if the path is from a dependancy
+/// Returns false if the path is from a dependency
 pub(crate) fn is_path_in_temp_workspace(uri: &Url) -> bool {
 uri.as_ref().contains(SyncWorkspace::LSP_TEMP_PREFIX)
 }
@@ -257,8 +257,8 @@ fn convert_url(uri: &Url, from: &Path, to: &PathBuf) -> Result<Url, DirectoryErr
 get_url_from_path(&path)
 }
-/// Deserialize the manifest file and loop through the dependancies.
-/// Check if the dependancy is specifying a 'path'.
+/// Deserialize the manifest file and loop through the dependencies.
+/// Check if the dependency is specifying a 'path'.
 /// If so, check if the path is relative and convert the relative path to an absolute path.
 /// Edit the toml entry using toml_edit with the absolute path.
 /// Save the manifest to temp_dir/Forc.toml.
@@ -266,7 +266,7 @@ pub(crate) fn edit_manifest_dependency_paths(
 manifest: &PackageManifestFile,
 temp_manifest_path: &Path,
 ) {
-// Key = name of the dependancy that has been specified will a relative path
+// Key = name of the dependency that has been specified will a relative path
 // Value = the absolute path that should be used to overwrite the relateive path
 let mut dependency_map: IndexMap<String, PathBuf> = IndexMap::new();

@@ -185,7 +185,7 @@ impl Token {
 /// This type is used as the key in the [TokenMap]. It's constructed during AST traversal
 /// where we compute the [Range] of the token and the convert [SourceId]'s to [PathBuf]'s.
 /// Although this introduces a small amount of overhead while traversing, precomputing this
-/// greatly speeds up performace in all other areas of the language server.
+/// greatly speeds up performance in all other areas of the language server.
 ///
 /// [TokenMap]: crate::core::token_map::TokenMap
 /// [SourceId]: sway_types::SourceId

@@ -82,7 +82,7 @@ pub enum RenameError {
 TokenNotFound,
 #[error("Token is not part of the user's workspace")]
 TokenNotPartOfWorkspace,
-#[error("Keywords and instrinsics are unable to be renamed")]
+#[error("Keywords and intrinsics are unable to be renamed")]
 SymbolKindNotAllowed,
 #[error("Invalid name {:?}: not an identifier", name)]
 InvalidName { name: String },

@@ -37,7 +37,7 @@ pub fn handle_initialize(
 state.spawn_client_heartbeat(client_pid as usize);
 }
-// Initalizing tracing library based on the user's config
+// Initializing tracing library based on the user's config
 let config = state.config.read();
 if config.logging.level != LevelFilter::OFF {
 let tracing_options = TracingSubscriberOptions {
@@ -346,7 +346,7 @@ pub(crate) async fn handle_inlay_hints(
 ///
 /// A formatted AST is written to a temporary file and the URI is
 /// returned to the client so it can be opened and displayed in a
-/// seperate side panel.
+/// separate side panel.
 pub async fn handle_show_ast(
 state: &ServerState,
 params: lsp_ext::ShowAstParams,

@@ -360,7 +360,7 @@ impl ServerState {
 let session = if let Some(item) = self.sessions.try_get(&manifest_dir).try_unwrap() {
 item.value().clone()
 } else {
-// If no session can be found, then we need to call init and inserst a new session into the map
+// If no session can be found, then we need to call init and insert a new session into the map
 self.init_session(uri).await?;
 self.sessions
 .try_get(&manifest_dir)

@@ -1664,13 +1664,13 @@ fn hover_docs_for_structs() {
 test_fixtures_dir().join("tokens/structs/src/main.sw"),
 )
 .await;
-let data_documention = "```sway\nenum Data\n```\n---\n My data enum";
+let data_documentation = "```sway\nenum Data\n```\n---\n My data enum";
 let mut hover = HoverDocumentation {
 req_uri: &uri,
 req_line: 12,
 req_char: 10,
-documentation: vec![data_documention],
+documentation: vec![data_documentation],
 };
 lsp::hover_request(&server, &hover).await;
 hover.req_line = 13;
@@ -2076,7 +2076,7 @@ lsp_capability_test!(
 test_fixtures_dir().join("completion/src/main.sw")
 );
-// This method iterates over all of the examples in the e2e langauge should_pass dir
+// This method iterates over all of the examples in the e2e language should_pass dir
 // and saves the lexed, parsed, and typed ASTs to the users home directory.
 // This makes it easy to grep for certain compiler types to inspect their use cases,
 // providing necessary context when working on the traversal modules.

@@ -81,7 +81,7 @@ impl<'a, 'e> Parser<'a, 'e> {
 /// This function will fork the current parse, and call the parsing function.
 /// If it succeeds it will sync the original parser with the forked one;
 ///
-/// If it fails it will return a `Recoverer` together with the `ErrorEmited`.
+/// If it fails it will return a `Recoverer` together with the `ErrorEmitted`.
 ///
 /// This recoverer can be used to put the forked parsed back in track and then
 /// sync the original parser to allow the parsing to continue.
@@ -127,7 +127,7 @@ impl<'a, 'e> Parser<'a, 'e> {
 /// This function will fork the current parse, and try to parse
 /// T using the fork. If it succeeds it will sync the original parser with the forked one;
 ///
-/// If it fails it will return a `Recoverer` together with the `ErrorEmited`.
+/// If it fails it will return a `Recoverer` together with the `ErrorEmitted`.
 ///
 /// This recoverer can be used to put the forked parsed back in track and then
 /// sync the original parser to allow the parsing to continue.
@@ -142,7 +142,7 @@ impl<'a, 'e> Parser<'a, 'e> {
 /// 2 - it forks the current parser and tries to parse
 /// T using this fork. If it succeeds it syncs the original
 /// parser with the forked one;
-/// 3 - if it fails it will return a `Recoverer` together with the `ErrorEmited`.
+/// 3 - if it fails it will return a `Recoverer` together with the `ErrorEmitted`.
 ///
 /// This recoverer can be used to put the forked parsed back in track and then
 /// sync the original parser to allow the parsing to continue.
@@ -464,7 +464,7 @@ impl<'original, 'a, 'e> ParseRecoveryStrategies<'original, 'a, 'e> {
 })
 }
-/// Starts the parser recovery proces calling the callback with the forked parser.
+/// Starts the parser recovery process calling the callback with the forked parser.
 /// All the changes to this forked parser will be imposed into the original parser,
 /// including diagnostics.
 pub fn start<'this>(
@@ -506,7 +506,7 @@ impl<'original, 'a, 'e> ParseRecoveryStrategies<'original, 'a, 'e> {
 /// This return a span encopassing all tokens that were consumed by the `p` since the start
 /// of the tentative parsing
 ///
-/// Thsi is useful to show one single error for all the consumed tokens.
+/// This is useful to show one single error for all the consumed tokens.
 pub fn diff_span<'this>(&self, p: &Parser<'a, 'this>) -> Span {
 let original = self.original.borrow_mut();

@@ -95,7 +95,7 @@ impl Formatter {
 let src = src.trim();
 let path = build_config.map(|build_config| build_config.canonical_root_module());
-// Formatted code will be pushed here with raw newline stlye.
+// Formatted code will be pushed here with raw newline style.
 // Which means newlines are not converted into system-specific versions until `apply_newline_style()`.
 // Use the length of src as a hint of the memory size needed for `raw_formatted_code`,
 // which will reduce the number of reallocations

@@ -92,7 +92,7 @@ impl CurlyBrace for ItemAbi {
 line: &mut String,
 formatter: &mut Formatter,
 ) -> Result<(), FormatterError> {
-// If shape is becoming left-most alligned or - indent just have the defualt shape
+// If shape is becoming left-most aligned or - indent just have the default shape
 formatter.unindent();
 write!(
 line,

@@ -152,7 +152,7 @@ impl CurlyBrace for ItemEnum {
 line: &mut String,
 formatter: &mut Formatter,
 ) -> Result<(), FormatterError> {
-// If shape is becoming left-most aligned or - indent just have the defualt shape
+// If shape is becoming left-most aligned or - indent just have the default shape
 formatter.unindent();
 write!(
 line,

@@ -98,7 +98,7 @@ impl CurlyBrace for ItemFn {
 line: &mut FormattedCode,
 formatter: &mut Formatter,
 ) -> Result<(), FormatterError> {
-// If shape is becoming left-most alligned or - indent just have the defualt shape
+// If shape is becoming left-most aligned or - indent just have the default shape
 formatter.unindent();
 write!(
 line,

@@ -176,7 +176,7 @@ impl CurlyBrace for ItemStruct {
 line: &mut String,
 formatter: &mut Formatter,
 ) -> Result<(), FormatterError> {
-// If shape is becoming left-most alligned or - indent just have the defualt shape
+// If shape is becoming left-most aligned or - indent just have the default shape
 formatter.unindent();
 write!(
 line,

@@ -1,7 +1,7 @@
 //! Based on `rustfmt`, `swayfmt` aims to be a transparent approach to formatting Sway code.
 //!
 //! `swayfmt` configurations can be adjusted with a `swayfmt.toml` config file declared at the root of a Sway project,
-//! however the defualt formatter does not require the presence of one and any fields omitted will remain as default.
+//! however the default formatter does not require the presence of one and any fields omitted will remain as default.
 #![allow(dead_code)]
 pub mod comments;

@@ -156,14 +156,14 @@ where
 let p_comment_spans = pair.1.leaf_spans();
 // Since we do not want to have comments between T and P we are extending the ByteSpans coming from T with spans coming from P
 // Since formatter can insert a trailing comma after a field, comments next to a field can be falsely inserted between the comma and the field
-// So we shouldn't allow inserting comments (or searching for one) between T and P as in Punctuated scenerio this can/will result in formattings that breaks the build process
+// So we shouldn't allow inserting comments (or searching for one) between T and P as in Punctuated scenario this can/will result in formatting that breaks the build process
 let mut comment_spans = pair
 .0
 .leaf_spans()
 .iter_mut()
 .map(|comment_map| {
 // Since the length of P' ByteSpan is same for each pair we are using the first one's length for all of the pairs.
-// This assumtion always holds because for each pair P is formatted to same str so the length is going to be the same.
+// This assumption always holds because for each pair P is formatted to same str so the length is going to be the same.
 // For example when P is CommaToken, the length of P is always 1.
 comment_map.end += p_comment_spans[0].len();
 comment_map.clone()

@@ -176,7 +176,7 @@ fn add_newlines(
 // Since we are adding newline sequences into the formatted code, in the next iteration the spans we find for the formatted code needs to be offsetted
 // as the total length of newline sequences we added in previous iterations.
 let mut offset = 0;
-// We will definetly have a span in the collected span since for a source code to be parsed there should be some tokens present.
+// We will definitely have a span in the collected span since for a source code to be parsed there should be some tokens present.
 let mut previous_unformatted_newline_span = unformatted_newline_spans
 .first()
 .ok_or(FormatterError::NewlineSequenceError)?;

@@ -6,14 +6,14 @@ pub(crate) mod language;
 pub(crate) mod map;
 pub(crate) trait CurlyBrace {
-/// Handles brace open scenerio. Checks the config for the placement of the brace.
+/// Handles brace open scenario. Checks the config for the placement of the brace.
 /// Modifies the current shape of the formatter.
 fn open_curly_brace(
 line: &mut FormattedCode,
 formatter: &mut Formatter,
 ) -> Result<(), FormatterError>;
-/// Handles brace close scenerio.
+/// Handles brace close scenario.
 /// Currently it simply pushes a `}` and modifies the shape.
 fn close_curly_brace(
 line: &mut FormattedCode,

@@ -66,6 +66,6 @@ async fn can_get_predicate_instance() {
 let _ = instance.transfer(wallet.address(), 1234, BASE_ASSET_ID, TxPolicies::default()).await;
-// Check balances after transfering funds out of predicate
+// Check balances after transferring funds out of predicate
 check_balances(&wallet, &instance, Some(1_000_000_000u64), Some(0u64)).await;
 }

@@ -3,7 +3,7 @@
 In order to minimize compilation time of individual tests, strive to reduce dependencies in tests.
 To achieve that, follow these guidelines:
-- Use `implicite-std = false` if dependency on `core` is not needed. This is often possible when testing `should_pass/language` features.
+- Use `implicit-std = false` if dependency on `core` is not needed. This is often possible when testing `should_pass/language` features.
 - Do not use `std` just to conveniently get an arbitrary type or trait. E.g., if a test requires an arbitrary type or trait, go with `struct Dummy {}` or `trait Trait {}` instead of importing `Option` or `Hash`.
 - If `std` functionality is needed, import the minimal [reduced `std` library](reduced_std_libs/README.md) that provides the functionality.
 - Import the full `std` only if the provided [reduced `std` libraries](reduced_std_libs/README.md) do not provide required types.

@@ -1,5 +1,5 @@
 category = "fail"
-# check: $()Nested packages are not supported, please consider seperating the nested package at
+# check: $()Nested packages are not supported, please consider separating the nested package at
 # check: $() from the package at
 # check: $(), or if it makes sense consider creating a workspace.

@@ -1,8 +1,8 @@
 [[package]]
 name = "core"
-source = "path+from-root-98E5BF1BF05D6FE9"
+source = "path+from-root-D4114B7A53E80452"
 [[package]]
-name = "generic_fn_trait_contraint"
+name = "generic_fn_trait_constraint"
 source = "member"
 dependencies = ["core"]

@@ -2,7 +2,7 @@
 authors = ["Fuel Labs <contact@fuel.sh>"]
 entry = "main.sw"
 license = "Apache-2.0"
-name = "generic_fn_trait_contraint"
+name = "generic_fn_trait_constraint"
 implicit-std = false
 [dependencies]

@@ -43,7 +43,7 @@ fn main() -> u64 {
 fn sell_product(product: Product) -> Result<Product, SaleError> {
 let mut product = product;
 if product.number_available < 1 {
-return Result::Err::<Product, SaleError>(SaleError::NotEnoughInventory("noo"));
+return Result::Err::<Product, SaleError>(SaleError::NotEnoughInventory("no"));
 };
 product.number_sold = product.number_sold + 1;
 product.number_available = product.number_available - 1;

@@ -1,7 +1,7 @@
 script;
 // 24 bytes
-// (8 bytes per elemet)
+// (8 bytes per element)
 struct Data {
 one: u64,
 two: u64,

@@ -1,6 +1,6 @@
 script;
-/// Inheritence tree:
+/// Inheritance tree:
 /// A
 /// |
 /// |

@@ -12,7 +12,7 @@ fn main() {
 let recipient_address = Address::from(0x9299da6c73e6dc03eeabcce242bb347de3f5f56cd1c70926d76526d7ed199b8b);
 let zero = 0;
 let coins = zero;
-// `coins:` is indirectly zero, this shouls pass
+// `coins:` is indirectly zero, this should pass
 caller.send_funds {
 gas: 10000,
 coins: coins,

@@ -12,7 +12,7 @@ abi EnglishAuction {
 fn bid(auction_id: u64, bid_asset: AuctionAsset);
 #[storage(read, write)]
-fn create(bid_asset: AuctionAsset, duration: u64, inital_price: u64, reserve_price: Option<u64>, seller: Identity, sell_asset: AuctionAsset) -> u64;
+fn create(bid_asset: AuctionAsset, duration: u64, initial_price: u64, reserve_price: Option<u64>, seller: Identity, sell_asset: AuctionAsset) -> u64;
 }
 abi NFT {

@@ -168,7 +168,7 @@ pub(super) async fn run(
 verbose: bool,
 mut experimental: ExperimentalFlags,
 ) -> Result<()> {
-// TODO the way moduels are built for these tests, new_encoding is not working.
+// TODO the way modules are built for these tests, new_encoding is not working.
 experimental.new_encoding = false;
 // Compile core library and reuse it when compiling tests.

@@ -82,7 +82,7 @@ impl ExperimentalStorageTest for Contract {
 assert(storage.nested_map_1.get(1).get(1).get(0).read() == 7);
 assert(storage.nested_map_1.get(1).get(1).get(1).read() == 8);
-// Thes combinations of keys are not set
+// These combinations of keys are not set
 assert(storage.nested_map_1.get(2).get(1).get(1).try_read().is_none());
 assert(storage.nested_map_1.get(1).get(2).get(1).try_read().is_none());
 assert(storage.nested_map_1.get(1).get(1).get(2).try_read().is_none());
@@ -115,7 +115,7 @@ impl ExperimentalStorageTest for Contract {
 assert(storage.nested_map_2.get((0, 1)).get(_0000).get(0).read() == m1);
 assert(storage.nested_map_2.get((0, 1)).get(_0001).get(1).read() == m2);
-// Thes combinations of keys are not set
+// These combinations of keys are not set
 assert(storage.nested_map_2.get((2, 0)).get(_0001).get(1).try_read().is_none());
 assert(storage.nested_map_2.get((1, 1)).get(_0002).get(0).try_read().is_none());
 assert(storage.nested_map_2.get((1, 1)).get(_0001).get(2).try_read().is_none());
@@ -154,7 +154,7 @@ impl ExperimentalStorageTest for Contract {
 assert(storage.nested_map_3.get(1).get(m1).get(0).read() == e1);
 assert(storage.nested_map_3.get(1).get(m2).get(1).read() == e2);
-// Thes combinations of keys are not set
+// These combinations of keys are not set
 assert(storage.nested_map_3.get(2).get(m2).get(1).try_read().is_none());
 assert(
 storage