Refactor: remove all traps; return Result of Record
- nft_get_story returns a StoryRecordResult
- inference and the nft_story_xxx endpoints return an InferenceRecordResult
- update scripts for InferenceRecordResult
- Remove all references to charles
- Remove trap calls
- Refactor trap in set_canister_mode
- initialize returns StatusCodeRecordResult and does not trap
- nft_whitelist, nft_init, nft_mint: no trap; return StatusCodeRecordResult
- nft_metadata: no trap; returns NFTCollectionRecordResult
- get_users: no trap; returns UsersRecordResult
- get_user_metadata: no trap
- StatusCodeRecordResult everywhere; no more traps
- Return Err when malloc fails
- fix pytest
icppWorld committed Mar 29, 2024
1 parent 890efac commit 0fba760
Showing 31 changed files with 896 additions and 425 deletions.
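
In caller terms, this refactor means the endpoints reply with a Candid Result-style variant rather than trapping the canister. A minimal sketch of the new calling convention — the record fields and values below are illustrative assumptions, not copied from src/llama2.did:

```
# Hedged sketch: what a caller might see after this refactor.
# Field names and values are illustrative assumptions only.
dfx canister call llama2 health
# On success, a Result-style variant (illustrative shape):
#   (variant { Ok = record { status_code = 200 : nat16 } })
# On failure, an Err variant comes back instead of a canister trap:
#   (variant { Err = record { status_code = 500 : nat16 } })
```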
3 changes: 3 additions & 0 deletions icpp_llama2/README.md
@@ -36,6 +36,9 @@
 Invoke-WebRequest -Uri https://huggingface.co/karpathy/tinyllamas/resolve/main/stories15M.bin -OutFile .\models\stories15M.bin
 ```
 
+- The demo_pytest.sh script starts the local network, deploys llama2_260K, uploads the model & tokenizer, and runs the QA with pytest:
+  - `./demo_pytest.sh` , on Linux / Mac
+
 - The *demo* script starts the local network, deploys llama2, uploads the model & tokenizer, and generates two stories:
   - `./demo.sh` , on Linux / Mac
   - `.\demo.ps1` , in Windows PowerShell (Miniconda recommended)
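The README's pytest bullet maps to the final step of demo_pytest.sh. If a local deployment is already up and the model is uploaded, that step can presumably be re-run on its own, using the same `--network` option the script passes:

```
# Re-run only the QA smoketests against an already-deployed local canister
# (assumes the local network is running and model & tokenizer are uploaded):
pytest --network=local
```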
3 changes: 0 additions & 3 deletions icpp_llama2/canister_ids.json
@@ -1,7 +1,4 @@
 {
-  "charles": {
-    "ic": "lkh5o-3yaaa-aaaag-acguq-cai"
-  },
   "llama2": {
     "ic": "4c4bn-daaaa-aaaag-abvcq-cai"
   },
19 changes: 18 additions & 1 deletion icpp_llama2/demo.sh
@@ -24,14 +24,30 @@ icpp build-wasm --to-compile all
echo " "
echo "--------------------------------------------------"
echo "Deploying the wasm to a canister on the local network"
dfx deploy
dfx deploy llama2_260K
dfx deploy llama2

#######################################################################
echo " "
echo "--------------------------------------------------"
echo "Setting canister_mode to chat-principal"
dfx canister call llama2_260K set_canister_mode chat-principal
dfx canister call llama2 set_canister_mode chat-principal

#######################################################################
echo " "
echo "--------------------------------------------------"
echo "Checking health endpoint"
dfx canister call llama2_260K health
dfx canister call llama2 health

#######################################################################
echo " "
echo "--------------------------------------------------"
echo "Initializing the canister configurations"
python -m scripts.nft_init --network local --canister llama2_260K --nft-supply-cap 0 --nft-symbol "" --nft-name "" --nft-description ""
python -m scripts.nft_init --network local --canister llama2 --nft-supply-cap 0 --nft-symbol "" --nft-name "" --nft-description ""

#######################################################################
echo " "
echo "--------------------------------------------------"
@@ -43,6 +59,7 @@ python -m scripts.upload --canister llama2 --model models/stories15M.bin --token
echo " "
echo "--------------------------------------------------"
echo "Checking readiness endpoint"
dfx canister call llama2_260K ready
dfx canister call llama2 ready

#######################################################################
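Since this commit removes the traps behind the endpoints called above, a script can inspect the returned variant and fail gracefully instead of dying mid-run. A hedged sketch of that pattern — keying on the textual `Ok`/`Err` output is an illustrative convention, not something demo.sh itself does:

```
# Illustrative only: abort the demo early when a call returns an Err variant.
output=$(dfx canister call llama2 health)
echo "$output"
case "$output" in
  *Err*) echo "health check returned Err; aborting" >&2; exit 1 ;;
esac
```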
83 changes: 83 additions & 0 deletions icpp_llama2/demo_pytest.sh
@@ -0,0 +1,83 @@
+#!/bin/sh
+
+dfx identity use default
+
+#######################################################################
+# For Linux & Mac
+#######################################################################
+
+echo " "
+echo "--------------------------------------------------"
+echo "Stopping the local network"
+dfx stop
+
+echo " "
+echo "--------------------------------------------------"
+echo "Starting the local network as a background process"
+dfx start --clean --background
+
+#######################################################################
+echo "--------------------------------------------------"
+echo "Building the wasm with wasi-sdk"
+# icpp build-wasm --to-compile all
+icpp build-wasm --to-compile mine
+
+#######################################################################
+echo " "
+echo "--------------------------------------------------"
+echo "Deploying the wasm to a canister on the local network"
+dfx deploy llama2_260K
+
+#######################################################################
+echo " "
+echo "--------------------------------------------------"
+echo "Setting canister_mode to chat-principal"
+dfx canister call llama2_260K set_canister_mode chat-principal
+
+#######################################################################
+echo " "
+echo "--------------------------------------------------"
+echo "Checking health endpoint"
+dfx canister call llama2_260K health
+
+#######################################################################
+echo " "
+echo "--------------------------------------------------"
+echo "Initializing the canister configurations"
+python -m scripts.nft_init --network local --canister llama2_260K --nft-supply-cap 0 --nft-symbol "" --nft-name "" --nft-description ""
+
+#######################################################################
+echo " "
+echo "--------------------------------------------------"
+echo "Uploading the model & tokenizer"
+python -m scripts.upload --network local --canister llama2_260K --model stories260K/stories260K.bin --tokenizer stories260K/tok512.bin
+
+#######################################################################
+echo " "
+echo "--------------------------------------------------"
+echo "Checking readiness endpoint"
+dfx canister call llama2_260K ready
+
+#######################################################################
+echo " "
+echo "--------------------------------------------------"
+echo "Running the full smoketests with pytest"
+pytest --network=local
+
+#######################################################################
+# echo "--------------------------------------------------"
+# echo "Stopping the local network"
+# dfx stop
+
+# #######################################################################
+# echo " "
+# echo "--------------------------------------------------"
+# echo "Building the OS native debug executable with clang++"
+# icpp build-native --to-compile all
+# # icpp build-native --to-compile mine
+
+# #######################################################################
+# echo " "
+# echo "--------------------------------------------------"
+# echo "Running the OS native debug executable"
+# ./build-native/mockic.exe
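Per the README, the new script is invoked directly on Linux / Mac. A usage sketch — the `chmod` is an assumption, needed only if the execute bit is not set on checkout:

```
# From the icpp_llama2 folder:
chmod +x demo_pytest.sh   # assumption: skip if the file is already executable
./demo_pytest.sh
```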
5 changes: 0 additions & 5 deletions icpp_llama2/dfx.json
@@ -20,11 +20,6 @@
"type": "custom",
"candid": "src/llama2.did",
"wasm": "build/llama2.wasm"
},
"charles": {
"type": "custom",
"candid": "src/llama2.did",
"wasm": "build/llama2.wasm"
}
},
"defaults": {