Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
43 commits
Select commit Hold shift + click to select a range
6356dd8
save progress
Synicix Dec 12, 2024
807b348
Merge branch 'pod-job2' into hashing
Synicix Dec 17, 2024
d28a589
Add file hashing ability
Synicix Dec 19, 2024
b92c3f5
Refactor hashing system
Synicix Dec 21, 2024
72b5744
add missing docs
Synicix Dec 21, 2024
a3f9359
Merge branch 'pod-job2' into hashing
Synicix Dec 21, 2024
62ab808
Merge remote-tracking branch 'upstream/dev' into hashing
Synicix Jan 29, 2025
f74088a
Remove store pointer, no tests yet
Synicix Jan 30, 2025
098850c
Rename pod_job's input and output to mapping. Fix bug with look up ha…
Synicix Jan 30, 2025
1d99ae8
Remove internal
Synicix Jan 30, 2025
7ebfdc9
Readd Retry Policy
Synicix Jan 30, 2025
f31f0da
reduce threshold to what is needed
Synicix Feb 1, 2025
56752d9
Allow space in annotation
Synicix Feb 3, 2025
c7921f5
Merge branch 'raphel' into hashing
Synicix Feb 12, 2025
bb2583a
initial fix of merge changes
Synicix Feb 12, 2025
e177177
Merge remote-tracking branch 'upstream/dev' into hashing3
Synicix Feb 19, 2025
e898df5
Added new store pointer impl
Synicix Feb 19, 2025
c06c064
Test orchestrator testing on github
Synicix Feb 20, 2025
31d5399
test orchestrator tests on github actions
Synicix Feb 20, 2025
bd2644c
save progress for now
Synicix Feb 20, 2025
aa05887
Fix syntax error
Synicix Feb 20, 2025
6711fc9
Save progress for now (problem is with orchestrator not using the sto…
Synicix Feb 20, 2025
8af8f3b
Merge branch 'hashing-ind' into hashing
Synicix Feb 25, 2025
29641f3
fixed code, no tests'
Synicix Feb 26, 2025
b76c3f8
merge updates
Synicix Feb 26, 2025
fc9c912
Merge branch 'hashing-ind' into hashing
Synicix Feb 27, 2025
40a5ab6
Merge remote-tracking branch 'upstream/dev' into hashing
Synicix Mar 5, 2025
3debf51
Fix merging issues
Synicix Mar 5, 2025
ce55297
Fix test merging issues, update test to test new store_map, add bette…
Synicix Mar 7, 2025
f3bbe9c
Remove unused imports
Synicix Mar 7, 2025
91e6239
Add store_map_fixture and remove a few stale changes
Synicix Mar 7, 2025
0eb7457
Remove logs and retry policy
Synicix Mar 17, 2025
1d4de87
Merge Raphael's recommendations
Synicix Mar 26, 2025
fed7561
Remove post start and switch to feature for dev container
Synicix Mar 27, 2025
68d3598
Reformat cargo.toml
Synicix Mar 27, 2025
eb439d3
Fix remaining bugs due to change of saving annotation behavior and re…
Synicix Mar 27, 2025
80c6356
Apply review feedback.
guzman-raphael Apr 1, 2025
b198699
Initialize temp directory.
guzman-raphael Apr 1, 2025
1b39aa8
Rename `Folder` to `Directory` and remove index slice.
guzman-raphael Apr 1, 2025
b3108a7
Remove index slice usage and move input binds call up.
guzman-raphael Apr 1, 2025
0aa20fa
Initialize test temp data for GPU DevContainer.
guzman-raphael Apr 1, 2025
4f878d9
Merge pull request #6 from guzman-raphael/Synicix/hashing-new
Synicix Apr 1, 2025
d88e16b
Updated dockerfile to include rsync
Synicix Apr 1, 2025
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 3 additions & 3 deletions .devcontainer/Dockerfile
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
# DevContainer image
FROM rust:1.84-slim
FROM rust:1.85-slim
RUN \
adduser --system --disabled-password --shell /bin/bash --home /home/vscode vscode && \
# install docker
Expand All @@ -15,7 +15,7 @@ RUN \
RUN \
# dev setup
apt update && \
apt-get install sudo jq git bash-completion graphviz -y && \
apt-get install sudo jq git bash-completion graphviz rsync -y && \
usermod -aG sudo vscode && \
echo '%sudo ALL=(ALL) NOPASSWD:ALL' >> /etc/sudoers && \
echo '. /etc/bash_completion' >> /home/vscode/.bashrc && \
Expand All @@ -35,4 +35,4 @@ RUN \
rustup component add llvm-tools-preview && \
# rust crate structure diagram
cargo install cargo-modules
CMD ["bash", "-c", "sudo rm /var/run/docker.pid; sudo dockerd"]
CMD ["bash", "-c", "sudo rm /var/run/docker.pid; sudo dockerd"]
4 changes: 2 additions & 2 deletions .devcontainer/devcontainer.json
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
"LOCAL_WORKSPACE_FOLDER": "${localWorkspaceFolder}",
"RUST_BACKTRACE": "1"
},
"postStartCommand": "docker system prune -fa && docker volume prune -f",
"postStartCommand": "mkdir -p tests/.tmp && docker system prune -fa && docker volume prune -f",
"hostRequirements": {
"cpus": 2,
"memory": "8gb",
Expand All @@ -39,4 +39,4 @@
]
}
}
}
}
4 changes: 2 additions & 2 deletions .devcontainer/gpu/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ RUN \
RUN \
# dev setup
apt update && \
apt-get install build-essential sudo jq git bash-completion -y && \
apt-get install build-essential sudo jq git bash-completion rsync -y && \
echo '%sudo ALL=(ALL) NOPASSWD:ALL' >> /etc/sudoers && \
echo '. /etc/bash_completion' >> /root/.bashrc && \
echo 'export PS1="\[\e[32;1m\]\u\[\e[m\]@\[\e[34;1m\]\H\[\e[m\]:\[\e[33;1m\]\w\[\e[m\]$ "' >> /root/.bashrc && \
Expand All @@ -34,4 +34,4 @@ RUN apt-get update && curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs
apt-get clean

ENV PATH=${PATH}:/root/.local/bin
CMD ["bash", "-c", "sudo rm /var/run/docker.pid; sudo dockerd"]
CMD ["bash", "-c", "sudo rm /var/run/docker.pid; sudo dockerd"]
7 changes: 5 additions & 2 deletions .devcontainer/gpu/devcontainer.json
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
"LOCAL_WORKSPACE_FOLDER": "${localWorkspaceFolder}",
"RUST_BACKTRACE": "1"
},
"postStartCommand": "docker system prune -fa && docker volume prune -f && apt update && apt install fish -y && chsh -s /usr/bin/fish",
"postStartCommand": "mkdir -p tests/.tmp && docker system prune -fa && docker volume prune -f",
"hostRequirements": {
"cpus": 2,
"memory": "8gb",
Expand All @@ -37,5 +37,8 @@
"streetsidesoftware.code-spell-checker" // Catch spelling errors in docs
]
}
},
"features": {
"ghcr.io/meaningful-ooo/devcontainer-features/fish:2": {} // Fish shell
}
}
}
6 changes: 4 additions & 2 deletions .github/workflows/tests.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ jobs:
- name: Install Rust + components
uses: actions-rust-lang/setup-rust-toolchain@v1
with:
toolchain: 1.84
toolchain: 1.85
components: rustfmt,clippy
- name: Install Rust code coverage
uses: taiki-e/install-action@cargo-llvm-cov
Expand All @@ -22,4 +22,6 @@ jobs:
- name: Run integration tests w/ coverage report
env:
RUST_BACKTRACE: full
run: cargo llvm-cov -- --nocapture
run: |
mkdir -p tests/.tmp
cargo llvm-cov -- --nocapture
4 changes: 2 additions & 2 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -4,5 +4,5 @@ Cargo.lock
target
lcov.info
*.tar.gz
test_store
*.dot
*.dot
tests/.tmp
2 changes: 1 addition & 1 deletion .vscode/launch.json
Original file line number Diff line number Diff line change
Expand Up @@ -103,4 +103,4 @@
"cwd": "${workspaceFolder}",
},
]
}
}
3 changes: 2 additions & 1 deletion .vscode/settings.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
{
"editor.formatOnPaste": false,
"editor.formatOnSave": true,
"files.insertFinalNewline": true,
"notebook.formatOnSave.enabled": true,
"editor.rulers": [
100
Expand All @@ -17,4 +18,4 @@
"gitlens.showWelcomeOnInstall": false,
"gitlens.showWhatsNewAfterUpgrades": false,
"lldb.consoleMode": "evaluate",
}
}
5 changes: 1 addition & 4 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -32,9 +32,7 @@ glob = "0.3.1" # recursive walk of filesystem in filestore
heck = "0.5.0" # strings to snake_case
names = "0.14.0" # random name generator
regex = "1.11.0" # complex pattern matching in strings
serde = { version = "1.0.210", features = [
"derive",
] } # serialization/deserialization to/from filestore
serde = { version = "1.0.210", features = ["derive"] } # serialization/deserialization to/from filestore
serde_json = "1.0.137" # JSON in sharing memory with local docker orchestrator
serde_yaml = "0.9.34" # YAML in filestore
sha2 = "0.10.8" # checksums based on SHA256
Expand Down Expand Up @@ -76,7 +74,6 @@ float_arithmetic = { level = "allow", priority = 127 } # allow f
host_endian_bytes = { level = "allow", priority = 127 } # allow to_ne_bytes / from_ne_bytes
impl_trait_in_params = { level = "allow", priority = 127 } # impl in params ok
implicit_return = { level = "allow", priority = 127 } # missing return ok
indexing_slicing = { level = "allow", priority = 127 } # allow since guaranteed and error handling is overkill
inline_asm_x86_intel_syntax = { level = "allow", priority = 127 } # intel syntax ok
integer_division = { level = "allow", priority = 127 } # allow discarding remainder
little_endian_bytes = { level = "allow", priority = 127 } # allow to_le_bytes / from_le_bytes
Expand Down
2 changes: 1 addition & 1 deletion cspell.json
Original file line number Diff line number Diff line change
Expand Up @@ -61,4 +61,4 @@
"peaceiris",
"tintinweb"
]
}
}
42 changes: 35 additions & 7 deletions src/crypto.rs
Original file line number Diff line number Diff line change
@@ -1,13 +1,26 @@
use crate::error::Result;
use crate::{
error::Result,
model::{Blob, BlobKind},
util::get,
};
use serde_yaml;
use sha2::{Digest as _, Sha256};
use std::{collections::BTreeMap, fs::File, io::Read, path::Path};

use std::{
collections::{BTreeMap, HashMap},
fs::File,
hash::RandomState,
io::Read,
path::{Path, PathBuf},
};
/// Evaluate checksum hash of streamed data i.e. chunked buffers.
///
/// # Errors
///
/// Will return error if unable to read from stream.
#[expect(
clippy::indexing_slicing,
reason = "Reading less than 0 is impossible."
)]
pub fn hash_stream(stream: &mut impl Read) -> Result<String> {
const BUFFER_SIZE: usize = 8 << 10; // 8KB chunks to match with page size typically found
let mut hash = Sha256::new();
Expand All @@ -22,12 +35,10 @@ pub fn hash_stream(stream: &mut impl Read) -> Result<String> {

Ok(format!("{:x}", hash.finalize()))
}

/// Evaluate checksum hash of raw data in memory.
pub fn hash_buffer(buffer: impl AsRef<[u8]>) -> String {
    // Hash the full byte slice in one shot and render it as lowercase hex.
    let digest = Sha256::digest(buffer.as_ref());
    format!("{digest:x}")
}

/// Evaluate checksum hash of a file.
///
/// # Errors
Expand All @@ -36,8 +47,7 @@ pub fn hash_buffer(buffer: impl AsRef<[u8]>) -> String {
pub fn hash_file(filepath: impl AsRef<Path>) -> Result<String> {
    // Open the file and delegate to the streaming hasher so large files
    // are processed chunk-by-chunk rather than loaded into memory.
    let mut file = File::open(filepath)?;
    hash_stream(&mut file)
}

/// Evaluate checksum hash of a folder.
/// Evaluate checksum hash of a directory.
///
/// # Errors
///
Expand All @@ -64,3 +74,21 @@ pub fn hash_dir(dirpath: impl AsRef<Path>) -> Result<String> {

Ok(hash_buffer(serde_yaml::to_string(&summary)?))
}
/// Evaluate checksum hash of a blob.
///
/// # Errors
///
/// Will return error if hashing fails on file or directory.
pub fn hash_blob(
    namespace_lookup: &HashMap<String, PathBuf, RandomState>,
    blob: Blob,
) -> Result<Blob> {
    // Resolve the blob's on-disk location: namespace root + relative path.
    let namespace_root = get(namespace_lookup, &blob.location.namespace)?;
    let blob_path = namespace_root.join(&blob.location.path);
    // Pick the hashing strategy that matches the blob kind.
    let checksum = match blob.kind {
        BlobKind::File => hash_file(blob_path)?,
        BlobKind::Directory => hash_dir(blob_path)?,
    };
    // Return the blob unchanged except for the freshly computed checksum.
    Ok(Blob { checksum, ..blob })
}
71 changes: 31 additions & 40 deletions src/error.rs
Original file line number Diff line number Diff line change
@@ -1,7 +1,5 @@
use bollard::errors::Error as BollardError;
use colored::Colorize as _;
use glob;
use regex;
use serde_json;
use serde_yaml;
use std::{
Expand All @@ -16,8 +14,16 @@ pub type Result<T> = result::Result<T, OrcaError>;
/// Possible errors you may encounter.
#[derive(Error, Debug)]
pub(crate) enum Kind {
#[error("File `{}` already exists.", path.to_string_lossy().bright_cyan())]
FileExists { path: PathBuf },
#[error("Received an empty response when attempting to load the alternate container image file: {path}.")]
EmptyResponseWhenLoadingContainerAltImage { path: PathBuf },
#[error("Out of generated random names.")]
GeneratedNamesOverflow,
#[error("Path missing a file or directory name: {path}.")]
InvalidPath { path: PathBuf },
#[error("An invalid datetime was set for pod result for pod job (hash: {pod_job_hash}).")]
InvalidPodResultTerminatedDatetime { pod_job_hash: String },
#[error("Key '{key}' was not found in map.")]
KeyMissing { key: String },
#[error("No annotation found for `{name}:{version}` {class}.")]
NoAnnotationFound {
class: String,
Expand All @@ -26,30 +32,22 @@ pub(crate) enum Kind {
},
#[error("No known container names.")]
NoContainerNames,
#[error("Out of generated random names.")]
GeneratedNamesOverflow,
#[error("No corresponding pod run found for pod job (hash: {pod_job_hash}).")]
NoMatchingPodRun { pod_job_hash: String },
#[error("An invalid datetime was set for pod result for pod job (hash: {pod_job_hash}).")]
InvalidPodResultTerminatedDatetime { pod_job_hash: String },
#[error("Received an empty response when attempting to load the alternate container image file: {path}.")]
EmptyResponseWhenLoadingContainerAltImage { path: PathBuf },
#[error("No tags found in provided container alternate image: {path}.")]
NoTagFoundInContainerAltImage { path: PathBuf },
#[error(transparent)]
BollardError(#[from] BollardError),
#[error(transparent)]
GlobPatternError(#[from] glob::PatternError),
#[error(transparent)]
RegexError(#[from] regex::Error),
IoError(#[from] io::Error),
#[error(transparent)]
SerdeYamlError(#[from] serde_yaml::Error),
PathPrefixError(#[from] path::StripPrefixError),
#[error(transparent)]
SerdeJsonError(#[from] serde_json::Error),
#[error(transparent)]
IoError(#[from] io::Error),
#[error(transparent)]
BollardError(#[from] BollardError),
#[error(transparent)]
PathPrefixError(#[from] path::StripPrefixError),
SerdeYamlError(#[from] serde_yaml::Error),
}
/// A stable error API interface.
#[derive(Error, Debug)]
Expand All @@ -71,24 +69,31 @@ impl Display for OrcaError {
write!(f, "{}", self.kind)
}
}
impl From<BollardError> for OrcaError {
fn from(error: BollardError) -> Self {
Self {
kind: Kind::BollardError(error),
}
}
}
impl From<glob::PatternError> for OrcaError {
fn from(error: glob::PatternError) -> Self {
Self {
kind: Kind::GlobPatternError(error),
}
}
}
impl From<regex::Error> for OrcaError {
fn from(error: regex::Error) -> Self {
impl From<io::Error> for OrcaError {
fn from(error: io::Error) -> Self {
Self {
kind: Kind::RegexError(error),
kind: Kind::IoError(error),
}
}
}
impl From<serde_yaml::Error> for OrcaError {
fn from(error: serde_yaml::Error) -> Self {
impl From<path::StripPrefixError> for OrcaError {
fn from(error: path::StripPrefixError) -> Self {
Self {
kind: Kind::SerdeYamlError(error),
kind: Kind::PathPrefixError(error),
}
}
}
Expand All @@ -99,24 +104,10 @@ impl From<serde_json::Error> for OrcaError {
}
}
}
impl From<io::Error> for OrcaError {
fn from(error: io::Error) -> Self {
Self {
kind: Kind::IoError(error),
}
}
}
impl From<BollardError> for OrcaError {
fn from(error: BollardError) -> Self {
Self {
kind: Kind::BollardError(error),
}
}
}
impl From<path::StripPrefixError> for OrcaError {
fn from(error: path::StripPrefixError) -> Self {
impl From<serde_yaml::Error> for OrcaError {
fn from(error: serde_yaml::Error) -> Self {
Self {
kind: Kind::PathPrefixError(error),
kind: Kind::SerdeYamlError(error),
}
}
}
Expand Down
Loading