Release v1.1.1 (#217)
Co-authored-by: Samuele Landi <samuele.landi@kryptotel.net>
abhath-labs and samuelelandi committed Mar 15, 2023
1 parent 8b5de4c commit ab98a1b
Showing 40 changed files with 1,280 additions and 296 deletions.
5 changes: 5 additions & 0 deletions .dockerignore
@@ -0,0 +1,5 @@
.devcontainer
.github
.vscode
scripts
target
29 changes: 29 additions & 0 deletions .github/workflows/build-and-publish-docker.yml
@@ -0,0 +1,29 @@
name: Build and publish a Docker image to ghcr.io
on:

# publish on releases, e.g. v2.1.13 (image tagged as "2.1.13" - "v" prefix is removed)
release:
types: [ published ]

# publish on pushes to the main branch (image tagged as "latest")
push:
branches:
- master

jobs:
docker_publish:
runs-on: "ubuntu-20.04"

steps:
- uses: actions/checkout@v2

# https://github.com/marketplace/actions/push-to-ghcr
- name: Build and publish a Docker image for ${{ github.repository }}
uses: macbre/push-to-ghcr@master
with:
image_name: ${{ github.repository }} # it will be lowercased internally
github_token: ${{ secrets.GITHUB_TOKEN }}
# optionally push to the Docker Hub (docker.io)
# docker_io_token: ${{ secrets.DOCKER_IO_ACCESS_TOKEN }} # see https://hub.docker.com/settings/security
# customize the username to be used when pushing to the Docker Hub
# docker_io_user: foobar # see https://github.com/macbre/push-to-ghcr/issues/14
24 changes: 22 additions & 2 deletions .github/workflows/ci.yml
@@ -8,8 +8,9 @@ on:
pull_request:
branches: [ master, develop ]

# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
concurrency:
group: ${{ github.head_ref }}
cancel-in-progress: true

# A workflow run is made up of one or more jobs that can run sequentially or in parallel
jobs:
@@ -87,3 +88,22 @@ jobs:

- name: Unit tests
run: cargo test -p bitgreen-parachain --features runtime-benchmarks

build-docker-image:
# The type of runner that the job will run on
runs-on: ubuntu-latest

# Steps represent a sequence of tasks that will be executed as part of the job
steps:
-
name: Set up QEMU
uses: docker/setup-qemu-action@v2
-
name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
-
name: Build Docker Image
uses: docker/build-push-action@v3
with:
push: false
tags: bitgreen/bitgreen-node:latest
4 changes: 4 additions & 0 deletions .github/workflows/codeql-analysis.yml
@@ -20,6 +20,10 @@ on:
schedule:
- cron: '44 18 * * 0'

concurrency:
group: ${{ github.head_ref }}
cancel-in-progress: true

jobs:
analyze:
name: Analyze
6 changes: 6 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default.

37 changes: 37 additions & 0 deletions Dockerfile
@@ -0,0 +1,37 @@
# Docker image for bitgreen parachain image

# This is a base image to build substrate nodes
FROM docker.io/paritytech/ci-linux:production as builder

WORKDIR /bitgreen-node
COPY . .

RUN rustup target add wasm32-unknown-unknown
RUN cargo build -p bitgreen-parachain --locked --release

# This is the 2nd stage: a very small image where we copy the binary."
FROM docker.io/library/ubuntu:20.04
LABEL description="Docker image for bitgreen parachain" \
image.type="builder" \
image.authors="team@bitgreen.org" \
image.vendor="Bitgreen" \
image.description="Docker image for bitgreen parachain"

# Copy the node binary.
COPY --from=builder /bitgreen-node/target/release/bitgreen-parachain /usr/local/bin

RUN useradd -m -u 1000 -U -s /bin/sh -d /node-dev node-dev && \
mkdir -p /chain-data /node-dev/.local/share && \
chown -R node-dev:node-dev /chain-data && \
ln -s /chain-data /node-dev/.local/share/bitgreen-node && \
# unclutter and minimize the attack surface
rm -rf /usr/bin /usr/sbin && \
# check if executable works in this container
/usr/local/bin/bitgreen-parachain --version

USER node-dev

EXPOSE 30333 9933 9944 9615
VOLUME ["/chain-data"]

ENTRYPOINT ["/usr/local/bin/bitgreen-parachain"]
1 change: 1 addition & 0 deletions artifacts/testnet/bitgreen-testnet-genesis
@@ -0,0 +1 @@
0x000000000000000000000000000000000000000000000000000000000000000000d8cc41ea079083bec601a4bbe5d03f543eb7b3708acf68fd25aa4ac90b7ba61b03170a2e7597b7b7e3d84c05391d139a62b157e78786d8c082f29dcf4c11131400
71 changes: 71 additions & 0 deletions artifacts/testnet/bitgreen-testnet-raw.json

Large diffs are not rendered by default.

1 change: 1 addition & 0 deletions artifacts/testnet/bitgreen-testnet-wasm

Large diffs are not rendered by default.

1 change: 0 additions & 1 deletion pallets/carbon-credits-pool/src/mock.rs
@@ -146,7 +146,6 @@ impl pallet_carbon_credits::Config for Test {
type KYCProvider = KYCMembership;
type MarketplaceEscrow = MarketplaceEscrowAccount;
type MaxAuthorizedAccountCount = ConstU32<2>;
type MaxCoordinatesLength = ConstU32<8>;
type MaxDocumentCount = ConstU32<2>;
type MaxGroupSize = MaxGroupSize;
type MaxIpfsReferenceLength = ConstU32<20>;
2 changes: 1 addition & 1 deletion pallets/carbon-credits-pool/src/tests.rs
@@ -134,7 +134,7 @@ where
let creation_params = ProjectCreateParams {
name: "name".as_bytes().to_vec().try_into().unwrap(),
description: "description".as_bytes().to_vec().try_into().unwrap(),
location: vec![(1, 1), (2, 2), (3, 3), (4, 4)].try_into().unwrap(),
location: "(1, 1), (2, 2), (3, 3), (4, 4)".as_bytes().to_vec().try_into().unwrap(),
images: vec!["image_link".as_bytes().to_vec().try_into().unwrap()].try_into().unwrap(),
videos: vec!["video_link".as_bytes().to_vec().try_into().unwrap()].try_into().unwrap(),
documents: vec!["document_link".as_bytes().to_vec().try_into().unwrap()]
1 change: 1 addition & 0 deletions pallets/carbon-credits/Cargo.toml
@@ -12,6 +12,7 @@ description = "Module to tokenize carbon credits"
targets = ['x86_64-unknown-linux-gnu']

[dependencies]
log = { version = "0.4.17", default-features = false }
bs58 = { default-features = false, version = '0.4.0', features = ['alloc'] }
codec = { package = "parity-scale-codec", version = "3.0.0", default-features = false, features = ["derive"] }
frame-support = { git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.33", default-features = false }
6 changes: 3 additions & 3 deletions pallets/carbon-credits/src/benchmarking.rs
@@ -77,7 +77,7 @@ where
let creation_params = ProjectCreateParams {
name: "name".as_bytes().to_vec().try_into().unwrap(),
description: "description".as_bytes().to_vec().try_into().unwrap(),
location: vec![(1, 1), (2, 2), (3, 3), (4, 4)].try_into().unwrap(),
location: "location".as_bytes().to_vec().try_into().unwrap(),
images: vec!["image_link".as_bytes().to_vec().try_into().unwrap()].try_into().unwrap(),
videos: vec!["video_link".as_bytes().to_vec().try_into().unwrap()].try_into().unwrap(),
documents: vec!["document_link".as_bytes().to_vec().try_into().unwrap()]
@@ -138,7 +138,7 @@ benchmarks! {
CarbonCredits::<T>::approve_project(RawOrigin::Signed(caller.clone()).into(), project_id, true)?;
}: _(RawOrigin::Signed(caller.clone()), project_id, group_id, 100_u32.into(), false)
verify {
assert_last_event::<T>(Event::CarbonCreditMinted { project_id, recipient : caller, amount : 100_u32.into() }.into());
assert_last_event::<T>(Event::CarbonCreditMinted { project_id, group_id, recipient : caller, amount : 100_u32.into() }.into());
}

retire {
@@ -159,7 +159,7 @@ benchmarks! {
verify {
let item_id : T::ItemId = 0_u32.into();
let retire_data = RetiredCredits::<T>::get(asset_id, item_id).unwrap();
assert_last_event::<T>(Event::CarbonCreditRetired { project_id, account : caller, amount : 10_u32.into(), retire_data :retire_data.retire_data }.into());
assert_last_event::<T>(Event::CarbonCreditRetired { project_id, group_id, asset_id, account : caller, amount : 10_u32.into(), retire_data :retire_data.retire_data }.into());
}

force_add_authorized_account {
7 changes: 5 additions & 2 deletions pallets/carbon-credits/src/functions.rs
@@ -282,10 +282,10 @@ impl<T: Config> Pallet<T> {
approved: false,
};

*project = new_project.clone();
*project = new_project;

// emit event
Self::deposit_event(Event::ProjectResubmitted { project_id, details: new_project });
Self::deposit_event(Event::ProjectResubmitted { project_id });

Ok(())
})
@@ -366,6 +366,7 @@ impl<T: Config> Pallet<T> {
// emit event
Self::deposit_event(Event::CarbonCreditMinted {
project_id,
group_id,
recipient,
amount: amount_to_mint,
});
@@ -493,6 +494,8 @@ impl<T: Config> Pallet<T> {
// emit event
Self::deposit_event(Event::CarbonCreditRetired {
project_id,
group_id,
asset_id: group.asset_id,
account: from,
amount,
retire_data: batch_retire_data_list,
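With the changes above, `CarbonCreditMinted` and `CarbonCreditRetired` now carry the `group_id` (and, for retirement, the `asset_id`) alongside the existing fields, so tests and indexers can identify the exact asset affected without a separate lookup. A minimal sketch of such a check follows; it assumes a mock runtime named `Test` whose outer event enum is `RuntimeEvent` with a `CarbonCredits` variant and a `u32` asset id, none of which are defined in this diff.

```rust
use frame_system::Pallet as System;

// Sketch only (assumed mock runtime, not part of this commit): scan the events
// recorded by frame_system and confirm a CarbonCreditRetired event was emitted
// for the expected asset_id, one of the fields added in this release.
fn assert_retired_for_asset(expected_asset: u32) {
    let found = System::<Test>::events().into_iter().any(|record| {
        matches!(
            record.event,
            RuntimeEvent::CarbonCredits(pallet_carbon_credits::Event::CarbonCreditRetired {
                asset_id, ..
            }) if asset_id == expected_asset
        )
    });
    assert!(found, "no CarbonCreditRetired event for asset {expected_asset}");
}
```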
27 changes: 18 additions & 9 deletions pallets/carbon-credits/src/lib.rs
@@ -52,6 +52,7 @@ mod types;
pub use types::*;

mod functions;
pub mod migration;
pub use functions::*;

mod weights;
@@ -184,12 +185,12 @@ pub mod pallet {
type MaxLongStringLength: Get<u32>;
/// Maximum length of ipfs reference data
type MaxIpfsReferenceLength: Get<u32>;
/// Maximum amount of location cordinates to store
type MaxCoordinatesLength: Get<u32>;
/// Maximum count of documents for one type
type MaxDocumentCount: Get<u32>;
/// Maximum amount of carbon credits in a bundle
type MaxGroupSize: Get<u32> + TypeInfo + Clone + Parameter;
/// Maximum amount of location cordinates to store
type MaxCoordinatesLength: Get<u32>;
/// Minimum value of AssetId for CarbonCredits
type MinProjectId: Get<Self::AssetId>;
/// Weight information for extrinsics in this pallet.
@@ -261,8 +262,6 @@ pub mod pallet {
ProjectResubmitted {
/// The ProjectId of the created project
project_id: T::ProjectId,
/// The details of the created project
details: ProjectDetail<T>,
},
/// Project has been approved
ProjectApproved {
@@ -280,6 +279,8 @@ pub mod pallet {
CarbonCreditMinted {
/// The ProjectId of the minted CarbonCredits
project_id: T::ProjectId,
/// The GroupId of the minted CarbonCredits
group_id: T::GroupId,
/// The AccountId that received the minted CarbonCredits
recipient: T::AccountId,
/// The amount of CarbonCredits units minted
@@ -289,6 +290,10 @@ pub mod pallet {
CarbonCreditRetired {
/// The ProjectId of the retired CarbonCredits
project_id: T::ProjectId,
/// The GroupId of the CarbonCredits retired
group_id: T::GroupId,
/// The AssetId of the CarbonCredits retired
asset_id: T::AssetId,
/// The AccountId that retired the CarbonCredits
account: T::AccountId,
/// The amount of CarbonCredits units retired
@@ -571,10 +576,14 @@ pub mod pallet {
}

/// Struct to verify if a given asset_id is representing a carbon credit project
pub struct CarbonCreditsAssetValidator<T>(sp_std::marker::PhantomData<T>);
impl<T: Config> Contains<T::AssetId> for CarbonCreditsAssetValidator<T> {
// Returns true if the AssetId represents a CarbonCredits project
fn contains(asset_id: &T::AssetId) -> bool {
AssetIdLookup::<T>::contains_key(asset_id)
impl<T: Config> primitives::CarbonCreditsValidator for Pallet<T> {
type ProjectId = T::ProjectId;

type GroupId = T::GroupId;

type AssetId = T::AssetId;

fn get_project_details(asset_id: &Self::AssetId) -> Option<(Self::ProjectId, Self::GroupId)> {
AssetIdLookup::<T>::get(asset_id)
}
}
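The last hunk replaces the old `CarbonCreditsAssetValidator` wrapper, which only answered a boolean `Contains` query, with an implementation of the shared `primitives::CarbonCreditsValidator` trait on the pallet itself, so callers also learn which project and group an asset belongs to. A rough sketch of how a consuming pallet could use it is below; the helper name and error string are illustrative, and only the trait surface (its associated types and `get_project_details`) comes from this diff.

```rust
use primitives::CarbonCreditsValidator;

// Illustrative helper (not part of this commit): resolve an asset id to its
// carbon-credit project and group via the new trait, or fail if the asset is
// not a carbon credit. `V` would typically be pallet_carbon_credits::Pallet<Runtime>.
fn ensure_carbon_credit<V: CarbonCreditsValidator>(
    asset_id: &V::AssetId,
) -> Result<(V::ProjectId, V::GroupId), &'static str> {
    // get_project_details replaces the old boolean Contains::contains check
    // and additionally returns the originating project and group ids.
    V::get_project_details(asset_id).ok_or("asset id does not represent a carbon credit")
}
```

In a runtime whose construct_runtime names the pallet instance `CarbonCredits`, a call such as `ensure_carbon_credit::<CarbonCredits>(&asset_id)?` could then gate, for example, a marketplace listing on the asset being a genuine project credit.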