Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
13 changes: 9 additions & 4 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

3 changes: 2 additions & 1 deletion Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ bellman = { git = "https://github.com/alex-ozdemir/bellman.git", branch = "mirag
rug-polynomial = { version = "0.2.5", optional = true }
ff = { version = "0.12", optional = true }
fxhash = "0.2"
good_lp = { version = "1.1", features = ["lp-solvers", "coin_cbc"], default-features = false, optional = true }
good_lp = {git = "https://github.com/Clive2312/good_lp.git", features = ["lp-solvers", "coin_cbc"], optional = true }
group = { version = "0.12", optional = true }
lp-solvers = { version = "0.0.4", optional = true }
serde_json = "1.0"
Expand All @@ -50,6 +50,7 @@ curve25519-dalek = {version = "3.2.0", features = ["serde"], optional = true}
paste = "1.0"
im = "15"
once_cell = "1"
alphanumeric-sort = "1.5.1"

[dev-dependencies]
quickcheck = "1"
Expand Down
205 changes: 205 additions & 0 deletions examples/C/mpc/benchmarks/kmeans/2pc_kmeans_.c
Original file line number Diff line number Diff line change
@@ -0,0 +1,205 @@
/*
 * 2-party k-means benchmark: parties A and B each contribute NA / NB
 * two-dimensional points; the circuit computes NC cluster centers.
 */
#define D 2           // Dimension (fixed)
#define NA 100        // Number of data points from Party A
#define NB 100        // Number of data points from Party B
#define NC 5          // Number of clusters
#define PRECISION 4   // Number of k-means refinement rounds

#define LEN (NA+NB)                  // Total number of data points
#define LEN_OUTER 10                 // Number of outer-loop partitions
#define LEN_INNER (LEN/LEN_OUTER)    // Points handled per partition


typedef int coord_t;

// Party A's private input: NA points packed as [x0,y0,x1,y1,...].
struct input_a{
    int dataA[D*NA];
};

// Party B's private input: NB points, same packed layout.
// FIX: array was sized D*NA; use D*NB so the size tracks Party B's
// point count (identical today only because NA == NB).
struct input_b {
    int dataB[D*NB];
};

// Public output: the NC cluster centers, packed as [x0,y0,x1,y1,...].
typedef struct
{
    coord_t cluster[D*NC];
} Output;


/* Squared Euclidean distance between points (x1,y1) and (x2,y2).
 * Kept squared on purpose: comparisons only need relative order and a
 * square root would be expensive in an MPC circuit. */
int dist2(int x1, int y1, int x2, int y2) {
    int dx = x1 - x2;
    int dy = y1 - y2;
    return dx * dx + dy * dy;
}

/*
 * Returns the index of the smallest element of data[0..len).
 * Ties keep the earliest index.
 *
 * FIX: the loop previously iterated over the global NC instead of the
 * len parameter, so any caller with len < NC read past the end of data.
 * The aux and stride parameters are kept for interface compatibility
 * (the original tree-reduction variant used them) but are unused by
 * this linear scan; the only caller passes aux[i] == i, so returning
 * the index is equivalent to returning the aux element.
 */
int min_with_aux(int *data, int *aux, int len, int stride) {
    int best = 0;
    for (int i = 1; i < len; i++) {
        if (data[i] < data[best]) {
            best = i;
        }
    }
    return best;
}


// Fixed-size summation macros: ADDk(X,A) expands to the sum of the k
// consecutive elements A[X] .. A[X+k-1].  ADD10 exists to reduce the
// LEN_OUTER per-partition partials in iteration_unrolled_outer.
// NOTE(review): the expansions carry no parentheses, so the result is a
// flat left-associative chain of additions, not a balanced tree — if a
// logarithmic addition depth is required for the circuit, the macros
// need parenthesization; confirm against the compiler's rebalancing.
// NOTE(review): ADD10 hard-codes LEN_OUTER == 10 — must be updated in
// lockstep if LEN_OUTER changes.
#define ADD2(X,A) A[X] + A[X+1]
#define ADD4(X,A) ADD2(X,A) + ADD2(X+2,A)
#define ADD8(X,A) ADD4(X,A) + ADD4(X+4,A)
#define ADD10(X,A) ADD8(X,A) + ADD2(X+8,A)

/**
 * Processes one partition of LEN_INNER points: assigns every point to
 * its nearest cluster center and accumulates, per cluster, the sums of
 * the assigned coordinates and the number of assigned points.
 *
 * data_inner     - LEN_INNER points, packed as [x0,y0,x1,y1,...]
 * cluster        - current NC cluster centers, same packed layout
 * OUTPUT_cluster - out: per-cluster coordinate sums (NC*D ints)
 * OUTPUT_count   - out: per-cluster assigned-point counts (NC ints)
 */
void iteration_unrolled_inner_depth(int *data_inner, int *cluster, int *OUTPUT_cluster, int *OUTPUT_count) {
    int dists[NC];
    int idx[NC];
    int assignment[LEN_INNER];

    /* Reset the per-partition accumulators. */
    for (int k = 0; k < NC; k++) {
        OUTPUT_cluster[k*D] = 0;
        OUTPUT_cluster[k*D+1] = 0;
        OUTPUT_count[k] = 0;
    }

    /* For each point: find the closest center, then fold the point into
     * that center's running sum and count. */
    for (int p = 0; p < LEN_INNER; p++) {
        int px = data_inner[p*D];
        int py = data_inner[p*D+1];
        for (int k = 0; k < NC; k++) {
            idx[k] = k;
            dists[k] = dist2(cluster[D*k], cluster[D*k+1], px, py);
        }
        assignment[p] = min_with_aux(dists, idx, NC, 1);
        int best = assignment[p];
        OUTPUT_cluster[best*D] += px;
        OUTPUT_cluster[best*D+1] += py;
        OUTPUT_count[best]++;
    }
}

/**
 * One full k-means update pass: runs the per-partition assignment over
 * all LEN_OUTER partitions, combines the partial sums with the ADD10
 * summation macros, and divides by the counts to produce the new
 * cluster centers.
 *
 * data           - all LEN points, packed as [x0,y0,x1,y1,...]
 * cluster        - current NC cluster centers (read only)
 * OUTPUT_cluster - out: updated NC cluster centers (NC*D ints)
 *
 * Cleanup: removed commented-out duplicate declarations and the stale
 * TODO; logic is unchanged.
 */
void iteration_unrolled_outer(int *data, int *cluster, int *OUTPUT_cluster) {
    int j, c;
    int count[NC];   // number of points assigned to each cluster

    // Zero the output accumulators.
    for (c = 0; c < NC; c++) {
        OUTPUT_cluster[c*D] = 0;
        OUTPUT_cluster[c*D+1] = 0;
        count[c] = 0;
    }

    // Per-partition partial results, gathered so they can be reduced
    // with the fixed-shape ADD10 summation below.
    int loop_clusterD1[NC][LEN_OUTER];   // per-cluster x-coordinate sums
    int loop_clusterD2[NC][LEN_OUTER];   // per-cluster y-coordinate sums
    int loop_count[NC][LEN_OUTER];       // per-cluster point counts

    // Compute the decomposition: each partition handles LEN_INNER points.
    for (j = 0; j < LEN_OUTER; j++) {
        // Copy this partition's slice of the data.  Hand-rolled copy is a
        // fast hack for scalability; memcpy is deliberately avoided here.
        int data_offset = j*LEN_INNER*D;
        int data_inner[LEN_INNER*D];
        for (int i = 0; i < LEN_INNER * D; i++)
        {
            data_inner[i] = data[i + data_offset];
        }

        int cluster_inner[NC*D];
        int count_inner[NC];

        iteration_unrolled_inner_depth(data_inner, cluster, cluster_inner, count_inner);

        // Record this partition's partial sums and counts.
        for (c = 0; c < NC; c++) {
            loop_clusterD1[c][j] = cluster_inner[c*D];
            loop_clusterD2[c][j] = cluster_inner[c*D+1];
            loop_count[c][j] = count_inner[c];
        }
    }

    // Reduce the LEN_OUTER partials (ADD10 assumes LEN_OUTER == 10).
    for (c = 0; c < NC; c++) {
        OUTPUT_cluster[c*D] = ADD10(0,loop_clusterD1[c]);
        OUTPUT_cluster[c*D+1] = ADD10(0,loop_clusterD2[c]);
        count[c] = ADD10(0, loop_count[c]);
    }

    // New center = mean of the assigned points.  Clusters with no
    // assigned points are skipped, which both avoids a divide-by-zero
    // and leaves their (zeroed) coordinates untouched.
    for (c = 0; c < NC; c++) {
        if (count[c] > 0) {
            OUTPUT_cluster[c*D] /= count[c];
            OUTPUT_cluster[c*D+1] /= count[c];
        }
    }
}

/**
 * k-means driver: seeds NC centers from the data and performs PRECISION
 * refinement rounds, writing the final centers to OUTPUT_res.
 *
 * data       - all LEN points, packed as [x0,y0,x1,y1,...]
 * OUTPUT_res - out: NC final cluster centers, same packed layout
 */
void kmeans(int *data, int *OUTPUT_res) {
    int cluster[NC*D];

    // Deterministic seeding: take centers straight from the data at
    // fixed offsets (indices (k+3) mod LEN).
    for (int k = 0; k < NC; k++) {
        cluster[k*D] = data[((k+3)%LEN)*D];
        cluster[k*D+1] = data[((k+3)%LEN)*D+1];
    }

    // Fixed number of refinement rounds (no convergence test: the MPC
    // circuit must have a static shape).
    for (int round = 0; round < PRECISION; round++) {
        int next[NC*D];
        iteration_unrolled_outer(data, cluster, next);
        // Feed the freshly computed centers back in for the next round.
        for (int k = 0; k < NC*D; k++) {
            cluster[k] = next[k];
        }
    }

    // Publish the final centers.
    for (int k = 0; k < NC; k++) {
        OUTPUT_res[k*D] = cluster[k*D];
        OUTPUT_res[k*D+1] = cluster[k*D+1];
    }
}


/**
 * MPC entry point: party 0 supplies a (NA points), party 1 supplies b
 * (NB points), each packed as [x0,y0,x1,y1,...].  The two inputs are
 * concatenated into one data array and the resulting cluster centers
 * are returned as the public output.
 */
Output main(__attribute__((private(0))) int a[200], __attribute__((private(1))) int b[200])
{
    int data[LEN * D];

    // Party A's points occupy the front of the combined array...
    for (int i = 0; i < D * NA; i++) {
        data[i] = a[i];
    }
    // ...and Party B's points follow immediately after.
    int offset = D * NA;
    for (int i = 0; i < D * NB; i++) {
        data[offset + i] = b[i];
    }

    Output output;
    kmeans(data, output.cluster);
    return output;
}
4 changes: 2 additions & 2 deletions examples/C/mpc/benchmarks/kmeans/2pc_kmeans_og.c
Original file line number Diff line number Diff line change
Expand Up @@ -196,8 +196,8 @@ int main(__attribute__((private(0))) int a[20], __attribute__((private(1))) int
{
data[i + offset] = b[i];
}

struct output output;
struct output output;

kmeans(data, output.cluster);

int sum = 0;
Expand Down
38 changes: 32 additions & 6 deletions examples/circ.rs
Original file line number Diff line number Diff line change
Expand Up @@ -112,6 +112,16 @@ enum Backend {
cost_model: String,
#[arg(long, default_value = "lp", name = "selection_scheme")]
selection_scheme: String,
#[arg(long, default_value = "4000", name = "partition_size")]
partition_size: usize,
#[arg(long, default_value = "4", name = "mutation_level")]
mutation_level: usize,
#[arg(long, default_value = "1", name = "mutation_step_size")]
mutation_step_size: usize,
#[arg(long, default_value = "1", name = "hyper")]
hyper: usize,
#[arg(long, default_value = "3", name = "imbalance")]
imbalance: usize,
},
}

Expand Down Expand Up @@ -238,17 +248,17 @@ fn main() {
opt(
cs,
vec![
Opt::ScalarizeVars,
// Opt::ScalarizeVars,
Opt::Flatten,
Opt::Sha,
Opt::ConstantFold(Box::new(ignore.clone())),
Opt::Flatten,
// Function calls return tuples
Opt::Tuple,
Opt::Obliv,
// Opt::Tuple,
// Opt::Obliv,
// The obliv elim pass produces more tuples, that must be eliminated
Opt::Tuple,
Opt::LinearScan,
// Opt::Tuple,
// Opt::LinearScan,
// The linear scan pass produces more tuples, that must be eliminated
Opt::Tuple,
Opt::ConstantFold(Box::new(ignore)),
Expand Down Expand Up @@ -364,6 +374,11 @@ fn main() {
Backend::Mpc {
cost_model,
selection_scheme,
partition_size,
mutation_level,
mutation_step_size,
hyper,
imbalance,
} => {
println!("Converting to aby");
let lang_str = match language {
Expand All @@ -373,7 +388,18 @@ fn main() {
};
println!("Cost model: {cost_model}");
println!("Selection scheme: {selection_scheme}");
to_aby(cs, &path_buf, &lang_str, &cost_model, &selection_scheme);
to_aby(
cs,
&path_buf,
&lang_str,
&cost_model,
&selection_scheme,
&partition_size,
&mutation_level,
&mutation_step_size,
&hyper,
&imbalance,
);
}
#[cfg(not(feature = "aby"))]
Backend::Mpc { .. } => {
Expand Down
2 changes: 1 addition & 1 deletion examples/opa_bench.rs
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,6 @@ fn main() {
outputs: vec![term![Op::Eq; t, v]],
..Default::default()
};
let _assignment = ilp::assign(&cs, "hycc");
let _assignment = ilp::assign(&cs.to_cs(), "hycc");
//dbg!(&assignment);
}
8 changes: 4 additions & 4 deletions scripts/aby_tests/c_test_aby.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,10 +16,10 @@
ite_tests + \
shift_tests + \
div_tests + \
mod_tests + \
struct_tests + \
ptr_tests + \
c_misc_tests
mod_tests
# struct_tests + \
# ptr_tests + \
# c_misc_tests
# array_tests + \
# c_array_tests + \
# matrix_tests + \
Expand Down
2 changes: 1 addition & 1 deletion scripts/build_kahypar.zsh
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

if [[ ! -z ${KAHYPAR_SOURCE} ]]; then
cd ${KAHYPAR_SOURCE}
mkdir build && cd build
mkdir -p build && cd build
cmake .. -DCMAKE_BUILD_TYPE=RELEASE
make
else
Expand Down
Loading