Kakarot Memory storage by reference
// AUTHOR: Ioan Oara
// Github: @ioanSL
// Company: ShardLabs
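//
// Proof of concept: an EVM-style Stack and Memory for Kakarot, each backed by a
// Cairo default dict (DictAccess). Stack values are Uint256 split into two
// 128-bit felts; Memory stores a pointer to the data ("by reference") instead of
// copying bytes. A dummy MSTORE/MLOAD pair in main() exercises both.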
%builtins range_check

from starkware.cairo.common.serialize import serialize_word
from starkware.cairo.common.uint256 import Uint256
from starkware.cairo.common.alloc import alloc
from starkware.cairo.common.math import assert_le, unsigned_div_rem
from starkware.cairo.common.dict import DictAccess, dict_read, dict_write
from starkware.cairo.common.default_dict import default_dict_new, default_dict_finalize
from starkware.cairo.common.cairo_builtins import HashBuiltin, BitwiseBuiltin
from starkware.cairo.common.bool import FALSE
from starkware.cairo.common.math import split_int, assert_nn
from starkware.cairo.common.math_cmp import is_le
from starkware.cairo.common.memcpy import memcpy
// from contracts.utils import Helpers  // unused here; requires the Kakarot repo on the import path
namespace model {
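// EVM stack backed by a dict: each Uint256 occupies two consecutive keys
// (high word then low word), so len_16bytes counts 16-byte half-words.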
struct Stack {
word_dict_start: DictAccess*,
word_dict: DictAccess*,
len_16bytes: felt,
}
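// EVM memory backed by a dict keyed by byte offset; values are pointers
// (references) to the stored data rather than copies of it.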
struct Memory {
word_dict_start: DictAccess*,
word_dict: DictAccess*,
bytes_len: felt,
}
}
namespace Stack {
struct Summary {
len_16bytes: felt,
squashed_start: DictAccess*,
squashed_end: DictAccess*,
}
func init() -> model.Stack* {
alloc_locals;
let (word_dict_start: DictAccess*) = default_dict_new(0);
return new model.Stack(
word_dict_start=word_dict_start, word_dict=word_dict_start, len_16bytes=0
);
}
func finalize{range_check_ptr}(self: model.Stack*) -> Summary* {
let (squashed_start, squashed_end) = default_dict_finalize(
self.word_dict_start, self.word_dict, 0
);
return new Summary(
len_16bytes=self.len_16bytes, squashed_start=squashed_start, squashed_end=squashed_end
);
}
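// Pops the top Uint256 by reading the high and low felts at the two topmost
// dict keys and shrinking len_16bytes by 2.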
func pop{}(self: model.Stack*) -> (new_stack: model.Stack*, element: Uint256) {
let word_dict = self.word_dict;
let position_zero = self.len_16bytes;
if (position_zero == 0) {
with_attr error_message("Kakarot: StackUnderflow") {
assert 1 = 0;
}
}
let (el_high) = dict_read{dict_ptr=word_dict}(position_zero - 2);
let (el_low) = dict_read{dict_ptr=word_dict}(position_zero - 1);
return (
new model.Stack(
word_dict_start=self.word_dict_start,
word_dict=word_dict,
len_16bytes=self.len_16bytes - 2,
),
Uint256(low=el_low, high=el_high),
);
}
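// Pushes a Uint256 by writing element.high then element.low at the next two
// dict keys and growing len_16bytes by 2.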
func push{}(self: model.Stack*, element: Uint256) -> model.Stack* {
let word_dict = self.word_dict;
let position_zero = self.len_16bytes;
// The EVM stack holds at most 1024 elements, i.e. 2048 16-byte half-words.
if (position_zero == 1024 * 2) {
with_attr error_message("Kakarot: StackOverflow") {
assert 1 = 0;
}
}
dict_write{dict_ptr=word_dict}(position_zero, element.high);
dict_write{dict_ptr=word_dict}(position_zero + 1, element.low);
return (
new model.Stack(
word_dict_start=self.word_dict_start,
word_dict=word_dict,
len_16bytes=self.len_16bytes + 2,
)
);
}
func pop_n{}(self: model.Stack*, new_elements: Uint256*, n: felt) -> (
new_stack: model.Stack*
) {
alloc_locals;
let word_dict = self.word_dict;
let position_zero = self.len_16bytes;
let (word_dict) = stack_to_uint256(
word_dict=word_dict, stack_len=position_zero, n=n * 2, output=new_elements
);
// Return Stack with updated Len
let popped_len = 2 * n;
return (
new model.Stack(
word_dict_start=self.word_dict_start,
word_dict=word_dict,
len_16bytes=self.len_16bytes - popped_len,
),
);
}
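// Recursively rebuilds the top n/2 Uint256 values (n counts 16-byte half-words)
// from the dict-backed stack into `output`, with output[0] holding the element on top.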
func stack_to_uint256{}(
word_dict: DictAccess*, stack_len: felt, n: felt, output: Uint256*
) -> (word_dict: DictAccess*) {
if (n == 0) {
return (word_dict=word_dict);
}
// Get Low and High of element at position N
let (el_high) = dict_read{dict_ptr=word_dict}(stack_len - n);
let (el_low) = dict_read{dict_ptr=word_dict}(stack_len - n + 1);
// Save Uint256 value in array
let n_index = n / 2 - 1;
assert output[n_index] = Uint256(low=el_low, high=el_high);
return stack_to_uint256(word_dict=word_dict, stack_len=stack_len, n=n - 2, output=output);
}
}
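// Debugging helper (not called from main): writes `value` into `n` consecutive
// cells starting at output_ptr; the hint prints ap, [ap - 1] and fp to inspect the frame.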
func add_number(output_ptr: felt*, n: felt, value: felt) -> (output_ptr: felt*) {
if (n == 0) {
// All cells written; return the pointer just past the last write.
return (output_ptr=output_ptr);
}
[ap] = output_ptr, ap++;
%{
print('ap =', ap)
print('[ap - 1] =', memory[ap - 1])
print('[fp] =',fp)
print()
%}
assert [output_ptr] = value;
return add_number(output_ptr=output_ptr + 1, n=n - 1, value=value);
}
namespace Memory {
struct Summary {
squashed_start: DictAccess*,
squashed_end: DictAccess*,
bytes_len: felt,
}
func init() -> model.Memory* {
alloc_locals;
let (word_dict_start: DictAccess*) = default_dict_new(0);
return new model.Memory(
word_dict_start=word_dict_start,
word_dict=word_dict_start,
bytes_len=0);
}
func finalize{range_check_ptr}(self: model.Memory*) -> Summary* {
let (squashed_start, squashed_end) = default_dict_finalize(
self.word_dict_start, self.word_dict, 0
);
return new Summary(
bytes_len=self.bytes_len, squashed_start=squashed_start, squashed_end=squashed_end
);
}
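// Stores by reference: the dict maps `offset` to the pointer `element_ptr`
// rather than to a copy of the underlying bytes; bytes_len grows by 16 per store.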
func store{}(
self: model.Memory*, element_ptr: felt*, offset: felt
) -> model.Memory* {
let word_dict = self.word_dict;
dict_write{dict_ptr=word_dict}(key=offset, new_value=cast(element_ptr, felt));
return (new model.Memory(
word_dict_start=self.word_dict_start,
word_dict=word_dict,
bytes_len=self.bytes_len + 16,
));
}
func _load{}(self: model.Memory*, offset: felt) -> (model.Memory*, felt*) {
let word_dict = self.word_dict;
let (value) = dict_read{dict_ptr=word_dict}(offset);
return (
new model.Memory(
word_dict_start=self.word_dict_start, word_dict=word_dict, bytes_len=self.bytes_len
),
cast(value, felt*),
);
}
func load{}(self: model.Memory*, offset: felt) -> (
new_memory: model.Memory*, loaded_element: felt*
) {
alloc_locals;
let (new_memory, loaded_element) = _load(self=self, offset=offset);
return (new_memory, loaded_element);
}
}
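// Minimal execution context: bundles the stack and the memory so the dummy
// opcodes below can thread updated copies through the call chain.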
struct ExecutionContext {
stack: model.Stack*,
memory: model.Memory*,
}
func init_ctx() -> ExecutionContext* {
alloc_locals;
let stack = Stack.init();
let memory = Memory.init();
return new ExecutionContext(
stack=stack,
memory=memory,
);
}
func update_ctx_stack(
self: ExecutionContext*, new_stack: model.Stack*
) -> ExecutionContext* {
return new ExecutionContext(
stack=new_stack,
memory=self.memory,
);
}
func update_ctx_memory(
self: ExecutionContext*, new_memory: model.Memory*
) -> ExecutionContext* {
return new ExecutionContext(
stack=self.stack,
memory=new_memory,
);
}
/// dummy store
func exec_mstore{range_check_ptr}(ctx: ExecutionContext*) -> ExecutionContext* {
alloc_locals;
let (local stack_read_values: Uint256*) = alloc();
// Stack input:
// 0 - offset: memory offset of the word we save.
// 1 - value: value to store in memory.
let (local stack) = Stack.pop_n(self=ctx.stack, new_elements=stack_read_values, n=2);
let offset = stack_read_values[0];
// Store a reference to the popped values rather than the values themselves.
let memory: model.Memory* = Memory.store(
self=ctx.memory, element_ptr=cast(stack_read_values, felt*), offset=offset.low
);
let ctx = update_ctx_memory(self=ctx, new_memory=memory);
let ctx = update_ctx_stack(self=ctx, new_stack=stack);
return ctx;
}
/// dummy load
func exec_mload{range_check_ptr}(ctx: ExecutionContext*) -> ExecutionContext* {
alloc_locals;
// Stack input:
// 0 - offset: memory offset of the word we load.
let (local stack, local offset) = Stack.pop(self=ctx.stack);
// Read back the reference stored at `offset` by exec_mstore.
let (memory, element) = Memory.load(self=ctx.memory, offset=offset.low);
%{
print('>>>', ids.element)
%}
let ctx = update_ctx_memory(self=ctx, new_memory=memory);
let ctx = update_ctx_stack(self=ctx, new_stack=stack);
return ctx;
}
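// Demo entrypoint: pushes a value and an offset, runs the dummy MSTORE (which
// stores a reference to the popped pair), then pushes the offset again and runs
// the dummy MLOAD to read that reference back.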
func main{range_check_ptr}() {
alloc_locals;
let ctx = init_ctx();
// fp-based copies keep these references valid across the calls below.
local ctx_start: ExecutionContext* = ctx;
// Push value (0x80) then offset (0x40); MSTORE pops the offset from the top.
let s = Stack.push(self=ctx_start.stack, element=Uint256(low=0x80, high=0));
let s = Stack.push(self=s, element=Uint256(low=0x40, high=0));
let ctx = update_ctx_stack(self=ctx_start, new_stack=s);
let ctx = exec_mstore(ctx=ctx);
local ctx_stored: ExecutionContext* = ctx;
local range_check_ptr = range_check_ptr;
// Push the offset again and read the stored reference back with MLOAD.
let s = Stack.push(self=ctx_stored.stack, element=Uint256(low=0x40, high=0));
let ctx = update_ctx_stack(self=ctx_stored, new_stack=s);
let ctx = exec_mload(ctx=ctx);
return ();
}