Source Language
Python
JavaScript
TypeScript
Java
C++
Go
Ruby
PHP
→
Compile Target
Rust
Go
C++
WASM
LLVM IR
MOSM
Source Code — Python input.py
import numpy as np
from dataclasses import dataclass
from typing import List, Optional
# Neural network layer implementation
@dataclass
class DenseLayer:
    """A fully connected layer with an optional activation non-linearity."""

    weights: np.ndarray      # weight matrix, shape (in_features, out_features)
    bias: np.ndarray         # bias vector, shape (out_features,)
    activation: str = "relu" # "relu", "sigmoid", or anything else for identity

    def forward(self, x: np.ndarray) -> np.ndarray:
        """Return activation(x @ weights + bias)."""
        z = np.dot(x, self.weights) + self.bias
        if self.activation == "relu":
            return np.maximum(0, z)
        elif self.activation == "sigmoid":
            return 1 / (1 + np.exp(-z))
        # Unknown activation names fall through to the identity (linear) case.
        return z

    def backward(self, grad: np.ndarray,
                 x: np.ndarray) -> np.ndarray:
        """Propagate ``grad`` through this layer and return dL/dx.

        Bug fix: the original applied only the ReLU derivative and passed
        the gradient through unchanged for "sigmoid"; the sigmoid
        derivative s * (1 - s) is now applied, completing the chain rule.
        """
        z = np.dot(x, self.weights) + self.bias
        if self.activation == "relu":
            # ReLU derivative: 1 where the pre-activation was positive.
            grad = grad * (z > 0)
        elif self.activation == "sigmoid":
            s = 1 / (1 + np.exp(-z))
            grad = grad * s * (1 - s)
        return np.dot(grad, self.weights.T)
class Network:
    """A feed-forward stack of layers applied front to back."""

    def __init__(self, layers: List[DenseLayer]):
        # Layers are applied in list order by predict().
        self.layers = layers

    def predict(self, x: np.ndarray) -> np.ndarray:
        """Feed ``x`` through every layer in sequence and return the result."""
        out = x
        for layer in self.layers:
            out = layer.forward(out)
        return out
Output Code — Rust output.rs
use ndarray::{Array1, Array2};
// Neural network layer implementation
/// A fully connected (dense) neural-network layer: a weight matrix, a bias
/// vector, and the name of the activation function to apply.
///
/// `Debug` is derived so the layer can be inspected in diagnostics
/// (`ndarray` arrays implement `Debug`).
#[derive(Debug, Clone)]
pub struct DenseLayer {
    /// Weight matrix, shape (in_features, out_features).
    weights: Array2<f64>,
    /// Bias vector, shape (out_features,).
    bias: Array1<f64>,
    /// Activation name: "relu", "sigmoid", or anything else for identity.
    activation: String,
}
impl DenseLayer {
    /// Build a layer from its parameters; `activation` selects the
    /// non-linearity ("relu", "sigmoid", anything else = identity).
    pub fn new(weights: Array2<f64>,
               bias: Array1<f64>,
               activation: &str) -> Self {
        Self {
            weights,
            bias,
            activation: activation.to_string(),
        }
    }

    /// Forward pass: `activation(x · W + b)`.
    pub fn forward(&self, x: &Array1<f64>) -> Array1<f64> {
        let z = x.dot(&self.weights) + &self.bias;
        match self.activation.as_str() {
            "relu" => z.mapv(|v| v.max(0.0)),
            "sigmoid" => z.mapv(|v| 1.0 / (1.0 + (-v).exp())),
            // Unknown activation names degrade to the identity.
            _ => z,
        }
    }

    /// Backward pass: propagate `grad` through the activation and the
    /// weight matrix, returning dL/dx.
    ///
    /// Bug fix: the original applied only the ReLU derivative and passed
    /// the gradient through unchanged for "sigmoid"; the sigmoid
    /// derivative `s * (1 - s)` is now applied, completing the chain rule.
    pub fn backward(&self, grad: &Array1<f64>,
                    x: &Array1<f64>) -> Array1<f64> {
        let g = match self.activation.as_str() {
            "relu" => {
                // ReLU derivative: 1 where the pre-activation was positive.
                let z = x.dot(&self.weights) + &self.bias;
                grad * &z.mapv(|v| if v > 0.0 { 1.0 } else { 0.0 })
            }
            "sigmoid" => {
                let z = x.dot(&self.weights) + &self.bias;
                let s = z.mapv(|v| 1.0 / (1.0 + (-v).exp()));
                grad * &s.mapv(|v| v * (1.0 - v))
            }
            // Identity activation: the gradient passes through unchanged.
            _ => grad.clone(),
        };
        g.dot(&self.weights.t())
    }
}
/// A feed-forward network: an ordered stack of dense layers.
pub struct Network {
    // Applied front-to-back by `predict`.
    layers: Vec<DenseLayer>,
}
impl Network {
pub fn predict(&self, x: &Array1<f64>)
-> Array1<f64> {
let mut out = x.clone();
for layer in &self.layers {
out = layer.forward(&out);
}
out
}
}
Compilation Progress Complete
1. AST Extraction
Done — 847 nodes
2. Type Inference
Done — 42 types resolved
3. IR Generation
Done — 1,247 instructions
4. Optimization
Done — 3 passes
5. Target Emission
Done — Rust
Compiled in 2.4s |
Lines: 35 → 52 |
Confidence: 97.8%
Diff Viewer — Structural Changes Python → Rust
@@ -1,8 +1,6 @@ Class structure
- @dataclass
- class DenseLayer:
+ #[derive(Clone)]
+ pub struct DenseLayer {
@@ -3,4 +3,6 @@ Fields
- weights: np.ndarray
- bias: np.ndarray
- activation: str = "relu"
+ weights: Array2<f64>,
+ bias: Array1<f64>,
+ activation: String,
@@ -10,4 +12,6 @@ Activation
- if self.activation == "relu":
- return np.maximum(0, z)
+ match self.activation.as_str() {
+ "relu" => z.mapv(|v| v.max(0.0)),
@@ -20,3 +24,4 @@ Ownership
- def predict(self, x):
+ pub fn predict(&self, x: &Array1<f64>)
+ -> Array1<f64> {
Instruction Library Search 522,847 entries
numpy.dot → ndarray::Array::dot
Matrix/vector dot product — Python numpy to Rust ndarray
np.maximum → f64::max / mapv
Element-wise maximum — Python numpy to Rust native
np.exp → f64::exp / mapv
Exponential function — Python numpy to Rust native
@dataclass → #[derive(Clone)]
Data class generation — Python decorator to Rust derive macro
List[T] → Vec<T>
Dynamic array — Python typing to Rust standard
Optional[T] → Option<T>
Nullable type — Python typing to Rust enum
self.method() → self.method()
Method call — Python instance to Rust impl block
for x in iter → for x in &iter
Iteration — Python iterator to Rust borrow