diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..0453f00 --- /dev/null +++ b/.gitignore @@ -0,0 +1,112 @@ +# Various IDEs and Editors +.vscode/ +.idea/ +**/*~ +.vessel/ + +# Github Access Token +gittoken.key + +# dApps +dapps-latest-build* +dapps-latest-build.zip + +# Mac OSX temporary files +.DS_Store +**/.DS_Store + +# dfx temporary files +.dfx/ + +# mint script output files +**/__*/ + +# frontend code +node_modules/ +dist/ +src/declarations/ + +seed.prod.txt +seed-prod.txt +seed.txt +identity.prod.pem +identity.pem +projects/bm/0.html +projects/bm/1.html +projects/bm/2.html +projects/bm/3.html +projects/bm/4.html +projects/bm/5.html +projects/bm/6.html +projects/bm/7.html +projects/bm/8.html +projects/bm/9.html +projects/bm/10.html +projects/bm/11.html +projects/bm/12.html +projects/bm/13.html +projects/bm/14.html +projects/bm/15.html +projects/bm/16.html +projects/bm/17.html +projects/bm/18.html +projects/bm/19.html +projects/bm/def_0.json +projects/bm/def_0a.json +projects/bm/def_1.json +projects/bm/def_1a.json +projects/bm/def_2.json +projects/bm/def_2a.json +projects/bm/def_3.json +projects/bm/def_3a.json +projects/bm/def_4.json +projects/bm/def_4a.json +projects/bm/def_5.json +projects/bm/def_5a.json +projects/bm/def_6.json +projects/bm/def_6a.json +projects/bm/def_7.json +projects/bm/def_7a.json +projects/bm/def_8.json +projects/bm/def_8a.json +projects/bm/def_9.json +projects/bm/def_9a.json +projects/bm/def_10.json +projects/bm/def_10a.json +projects/bm/def_11.json +projects/bm/def_11a.json +projects/bm/def_12.json +projects/bm/def_12a.json +projects/bm/def_13.json +projects/bm/def_13a.json +projects/bm/def_14.json +projects/bm/def_14a.json +projects/bm/def_15.json +projects/bm/def_15a.json +projects/bm/def_16.json +projects/bm/def_16a.json +projects/bm/def_17.json +projects/bm/def_17a.json +projects/bm/def_18.json +projects/bm/def_18a.json +projects/bm/def_19.json +projects/bm/def_19a.json +projects/bm/def_collection_build.json +projects/bm/def_collection_1.json +projects/bm/def_collection_2.json +projects/bm/def_collection_loaded.json +projects/bm/def_loaded_1.json +projects/bm/def_loaded_2.json +projects/bm/def_loaded.json +src/origyn_nft_reference_mo/data.mo +src/origyn_nft_reference_mo/dfxtypes.mo +src/origyn_nft_reference_mo/http.mo +src/origyn_nft_reference_mo/main.mo +src/origyn_nft_reference_mo/mint.mo +src/origyn_nft_reference_mo/utils.mo +src/origyn_nft_reference_mo/migrations/lib.mo +src/origyn_nft_reference_mo/migrations/types.mo +src/origyn_nft_reference_mo/migrations/v000_000_000/lib.mo +src/origyn_nft_reference_mo/migrations/v000_000_000/types.mo +src/origyn_nft_reference_mo/migrations/v000_001_000/lib.mo +src/origyn_nft_reference_mo/migrations/v000_001_000/types.mo diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..c1a6c06 --- /dev/null +++ b/LICENSE @@ -0,0 +1,202 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2021,2022 Origyn Foundation + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+F
\ No newline at end of file
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..b5fb844
--- /dev/null
+++ b/README.md
@@ -0,0 +1,106 @@
+# origyn_nft_reference
+
+## Getting up and running
+
+### Testing
+
+You will need the proper version of yes for your OS. (npm install -g yes)
+
+yes yes | ./runners/test_runner.sh
+
+### Produce an identity for deploying locally
+
+1. You need to have an identity.pem and a seed.txt in your root directory. You can follow the instructions at https://forum.dfinity.org/t/using-dfinity-agent-in-node-js/6169/50 to produce these files. You should add these files to your .gitignore.
+
+Navigate to your .dfx identities → ~/.config/.dfx/identity
+
+Create a new identity → mkdir local-testing; cd local-testing
+
+Download quill → https://github.com/dfinity/quill
+
+Test that quill is installed correctly → quill
+
+Look up how to generate a key → quill generate --help
+
+Generate a key and seed file → quill generate --pem-file identity.pem --seed-file seed.txt
+
+Copy these files to your root directory and add them to your .gitignore.
+
+To run deployment scripts you will also need to produce seed.prod.txt and identity.prod.pem for a deploying identity.
+
+### You may need a GitHub REST API key
+
+https://docs.github.com/rest
+
+You can put this key in gittoken.key.
+
+It may be necessary to download the default dapps.
+
+## NFT Canister
+
+[Overview](./docs/nft.md)
+
+[NFT Canister API](./docs/nft-current-api.md)
+
+[NFT Canister Sample Calls](./docs/sample_calls.md)
+
+[Bad Response Examples](./docs/badresponse.md)
+
+[Auction Sample Calls and Results](./docs/auction-results.md)
+
+## Sales Canister
+
+[Overview](./docs/nft_sale.md)
+
+## Project Management
+
+[User Stories](./docs/PM.md)
+
+## NFT Projects
+
+The ./projects folder contains NFT project folders which contain NFT assets for minting along with their custom deploy scripts. All deploy scripts should be invoked from the root of the project. For example:
+
+```bash
+yes yes | bash ./projects/bayc/deploy.sh
+```
+
+Reusable scripts are placed at the root of the ./projects folder.
+
+## Git Large File Storage
+
+This project contains video files that are stored in Git LFS. They are not downloaded automatically when you clone the repo.
+To download the videos, run the following:
+
+```
+git lfs install
+git lfs fetch
+git lfs checkout
+```
+
+Reference: https://git-lfs.github.com/
+
+### deploy.js
+
+Location: _./projects/deploy.js_.
+
+Node script that stages and mints NFTs using a JSON metadata file as input. This script is called by the deployment scripts of some projects under the ./projects folder.
+
+### csm.js
+
+Location: _./projects/csm.js_.
+
+Node script providing the subcommands _config_, _stage_ and _mint_. This script is called by bash scripts in the _kobe_ and _bayc_ projects and should be called by all new NFT projects.
+
+The _csm_ script includes the staging and minting logic from _./projects/deploy.js_ organized into subcommands, as well as a subcommand to generate the JSON metadata file. New projects should use this script. Refer to _./projects/kobe/deploy.sh_ and the documentation at the top of _./projects/csm.js_ for usage.
+
+Currently _csm_ adds all files in the target project folder to a single asset collection. It was created with the idea that it will evolve with additional functionality. For example, new arguments have been added for minting only a range of NFTs, minting in batches, and combining external CSS/JS references into single HTML files.
+ +The _config_ subcommand copies all assets from the target folder (-f arg) into a new sibling folder named _\_\_staged_. Any HTML assets are modified to pull in all external CSS or JavaScript references and replace links with exOS urls. A new metadata file named _full_def.json_ is then generated in the _\_\_staged_ folder. This file will be referenced by the _stage_ and _mint_ subcommands. + +Each NFT project has unique requirements and may need custom predeploy and postdeploy scripts. A predeploy script may create HTML files from templates (see _./projects/bayc/predeploy.js_) and a postdeploy script may open the metadata file (_full_def.json_) created by the _config_ subcommand. + +## Testing + +```bash +yes yes | bash ./runners/test_runner.sh +``` diff --git a/dfx.json b/dfx.json new file mode 100644 index 0000000..2cc9c60 --- /dev/null +++ b/dfx.json @@ -0,0 +1,100 @@ +{ + "canisters": { + + "origyn_nft_reference": { + "main": "src/origyn_nft_reference/main.mo", + "type": "motoko" + }, + "origyn_sale_reference": { + "main": "src/origyn_sale_reference/main.mo", + "type": "motoko" + }, + "test_runner": { + "main": "src/tests/test_runner.mo", + "type": "motoko", + "dependencies": [] + }, + "test_runner_nft": { + "main": "src/tests/test_runner_nft.mo", + "type": "motoko", + "dependencies": [] + }, + "test_runner_nft_2": { + "main": "src/tests/test_runner_nft_2.mo", + "type": "motoko", + "dependencies": [] + }, + "test_runner_storage": { + "main": "src/tests/test_runner_storage.mo", + "type": "motoko", + "dependencies": [] + }, + "test_runner_instant_transfer": { + "main": "src/tests/test_runner_instant_transfer.mo", + "type": "motoko", + "dependencies": [] + }, + "test_runner_data": { + "main": "src/tests/test_runner_data.mo", + "type": "motoko", + "dependencies": [] + }, + "test_runner_collection": { + "main": "src/tests/test_runner_collection.mo", + "type": "motoko", + "dependencies": [] + }, + "test_runner_utils": { + "main": "src/tests/test_runner_utils.mo", + "type": "motoko", + "dependencies": [] + }, + "test_canister_factory": { + "main": "src/tests/canister_creator.mo", + "type": "motoko", + "dependencies": [] + }, + "test_storage_factory": { + "main": "src/tests/storage_creator.mo", + "type": "motoko", + "dependencies": [] + }, + "test_runner_sale": { + "main": "src/tests/test_runner_sale.mo", + "type": "motoko", + "dependencies": [] + }, + "all_ref": { + "main": "src/tests/all_ref.mo", + "type": "motoko", + "dependencies": [] + }, + "dfxledger": { + "type": "custom", + "candid": "src/tests/ledger_dfx_v2.did", + "wasm": "src/tests/ledger-canister-min.wasm" + }, + "dfxledger2": { + "type": "custom", + "candid": "src/tests/ledger_dfx_v2.did", + "wasm": "src/tests/ledger-canister-min.wasm" + } + }, + "defaults": { + "build": { + "args": "", + "packtool": "vessel sources" + }, + "replica": { + "subnet_type": "system" + } + }, + "dfx": "0.11.1", + "networks": { + "local": { + "bind": "127.0.0.1:8000", + "type": "ephemeral" + } + }, + "version": 1 +} diff --git a/docs/nft-current-api.md b/docs/nft-current-api.md new file mode 100644 index 0000000..625067b --- /dev/null +++ b/docs/nft-current-api.md @@ -0,0 +1,1877 @@ + + + +## Balance + +``` +query balance_of_nft_origyn(account: Account) -> Result + + public type Account = { + #principal : Principal; //just a principal and default to null subaccount + #account : {owner: Principal; sub_account: ?Blob}; //for future icrc-1 + #account_id : Text; //raw account id for compatability...some features not available + #extensible : 
CandyTypes.CandyValue; //for future extensibility
+    };
+
+returns:
+
+type BalanceResult = {
+    multiCanister: ?[Principal]; // will hold other canisters that are part of the collection - not yet implemented
+    nfts: [Text]; //nft ids owned by the user
+    escrow: [EscrowRecord]; // escrow records that the user has on file
+    sales: [EscrowRecord]; // sale records that the user has on file
+    offers: [EscrowRecord]; // offers that have been made
+    stake: [StakeRecord]; // nyi
+  };
+
+```
+
+
+* alternative mappings
+    * query balanceOfDip721(user: principal) -> Nat64; - only supports principals
+    * query balanceEXT(request: EXTBalanceRequest) -> EXTBalanceResponse; Token Identifier is a text from Principal of [10, 116, 105, 100] + CanisterID as [Nat8] + Nat32 as bytes of Text.hash of token_id as each canister has only one token identifier
+
+## Owner
+
+```
+    query bearer_nft_origyn(token_id: Text) -> Result
+    query bearer_batch_nft_origyn(token_ids: [Text]) -> [Result]
+    bearer_secure_nft_origyn(token_id: Text) -> Result
+    bearer_batch_secure_nft_origyn(token_ids: [Text]) -> [Result]
+```
+Returns the owner of the NFT indicated by token_id.
+
+* alternative mappings
+    * query ownerOfDip721(token_id: Nat) -> DIP721OwnerResult; - will compare Nat64 hash of text token IDs to the token_id
+    * query ownerOf(token_id: Nat) -> DIP721OwnerResult; - will compare Nat64 hash of text token IDs to the token_id //for the questionable "v2" upgrade where the standard is now compatible with fewer web3 tools
+    * query bearerEXT(token: TokenIdentifier) -> Result; bearer() also exists for legacy native ext support
+    * query bearer(token: TokenIdentifier) -> Result; bearer() also exists for legacy native ext support //for legacy support
+
+
+## Transfers
+
+The Origyn NFT supports two types of transfers: owner transfers and market transfers.
+
+Owner transfers are meant as a management function for an NFT owner who needs to move their NFT from one wallet to another. The NFT enforces this policy by transferring not only the NFT, but also other Origyn-based assets associated with the NFT, to the new address. Both addresses maintain rights over the NFT for a configured time. You should not use this unless you are transferring to a wallet that you own and do not share with anyone.
+
+Market transfers are the standard way to transact with Origyn NFTs. To help establish true market prices, protect human ingenuity, and reward value creators/originators, all transfers of marketable NFTs must go through a public market cycle that ensures true value is being paid for the asset. As value flows through the NFT, the NFT applies the revenue sharing built into it.
+
+### Owner Transfers
+
+```
+
+    public type ShareWalletRequest = {
+        token_id: Text;
+        from: Account;
+        to: Account;
+    };
+
+    public type OwnerTransferResponse = {
+        transaction: TransactionRecord;
+        assets: [CandyTypes.CandyValue]; //assets included in the transfer
+    };
+
+    share_wallet_nft_origyn(ShareWalletRequest) -> Result
+
+```
+
+An owner transfer moves an NFT from one wallet of an owner to another wallet of the same owner. All associated assets move with the NFT, so an owner should never use this function to transfer an NFT to another user.
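For illustration only, here is a minimal Motoko fragment showing what an owner-transfer call might look like from a client. The `nft`, `old_wallet`, and `new_wallet` identifiers are placeholders, error handling is trimmed, and a real client should call through the generated canister declarations rather than this sketch:

```
// Move "my-nft-1" between two wallets controlled by the same person.
let request : ShareWalletRequest = {
  token_id = "my-nft-1";
  from = #principal(old_wallet);   // wallet currently holding the NFT
  to = #principal(new_wallet);     // another wallet owned by the same person
};

switch (await nft.share_wallet_nft_origyn(request)) {
  case (#ok(response)) {
    // response.transaction is the owner_transfer ledger record;
    // response.assets lists the escrows/offers that moved with the NFT.
  };
  case (#err(e)) {
    // OrigynError: caller is not the owner, token not found, etc.
  };
};
```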
+ +### Market Transfers + +``` + public type MarketTransferRequest = { + token_id: Text; + sales_config: SalesConfig; + }; + + public type SalesConfig = { + escrow_receipt : ?EscrowReceipt; + broker_id : ?Principal; + pricing: PricingConfig; + }; + + public type PricingConfig = { + #instant; //executes an escrow recipt transfer - only available for non-marketable NFTs + #auction: AuctionConfig; + //below have not been signficantly designed or vetted + #flat: { //nyi + token: TokenSpec; + amount: Nat; //Nat to support cycles + }; + #dutch: { + start_price: Nat; + decay_per_hour: Float; + reserve: ?Nat; + }; + #extensible:{ + #candyClass + } + }; + + public type TokenSpec = { + #ic: ICTokenSpec; + #extensible : CandyTypes.CandyValue; //#Class + }; + + public type ICTokenSpec = { + canister: Principal; + fee: Nat; + symbol: Text; + decimals: Nat; + standard: { + #DIP20; //NYI + #Ledger; //OGY and ICP + #EXTFungible; //NYI + #ICRC1; //NYI + }; + }; + + public type AuctionConfig = { + reserve: ?Nat; + token: TokenSpec; + buy_now: ?Nat; + start_price: Nat; + start_date: Int; + ending: { + #date: Int; + #waitForQuiet: { //nyi + date: Int; + extention: Nat64; + fade: Float; + max: Nat + }; + }; + min_increase: { + #percentage: Float; //nyi + #amount: Nat; + }; + allow_list : ?[Principal]; //Result must pass waivers + }; + + market_transfer_nft_origyn(MarketTransferRequest) -> Result + market_transfer_batch_nft_origyn([MarketTransferRequest]) -> [Result] + +``` + +Initiates the market-based transfer of the NFT. + +Currently implemented pricing configs are: + +``` +#instant +``` + +Instant transfers are used to sell an unminted NFT or a direct sale of a minted NFT. They require an escrow to be on file with the canister. The owner of an NFT must be given the escrow receipt and must submit the receipt with the transfer request. + +``` +#auction +``` + +An auction allows users to bid on an NFT until it closes. The winner can then claim the NFT. Bidders must post an escrow for their bids. + +Note: For alternative mappings the existance of an escrow is the approval for the transfer. They use the #instant transfer method under the hood and look up an existing escrow. They use the first escrow they find that matches the to, from, token_id pair. + +* alternative mappings + * transferFromDip721(from: principal, to: principal, tokenAsNat: nat) -> Result; - token_id will be converted from the Nat representation. + * transferFrom(from: principal, to: principal, tokenAsNat: nat) -> Result; - token_id will be converted from the Nat representation. //v2 + * transferDip721(to: principal, tokenAsNat: nat) -> Result; - token_id will be converted from the Nat representation. + * transferEXT(request : EXTTransferRequest) -> EXTTransferResponse; transfer() also exists for legacy native ext support + * transfer(request : EXTTransferRequest) -> EXTTransferResponse; transfer() also exists for legacy native ext support + + +## Minting + + +Stages meta data for an NFT + +``` +stage_nft_origyn({metadata: CandyValue #Class}); - Stages the metadata +``` + +Stages Chunks of Data + +``` +stage_library_nft_origyn(StageChunkArg = { + token_id: Text; + library_id: Text; + filedata: CandyTypes.CandyValue; + chunk: Nat; + content: Bool; //up to 2MB +}) : Result<#ok(bool),#err(OrigynError)>; - Stages the content +``` + +Mints an NFT + +Mints a staged NFT and assigns it to the owner. This a "free" transfer. In the future this may involve a network fee based on the node provider that is minting the item. 
+ +``` +mint_nft_origyn(text:token_id, owner: Account); + +``` + +### NFT Information + +``` +query nft_origyn(id: Text) query -> NFTInfo +nft_secure origyn(id: Text) query -> NFTInfo +query nft_batch_origyn(id: [Text]) -> [NFTInfo] +query nft_batch_secure_origyn(id: [Text]) query -> [NFTInfo] + +``` + +returns data about the nft. + +metatdata - nfts are a class of CandyValues(see below section) +currentSale - if the NFT is for sale it will returne info about the current sale + +``` + { + id: #Text + primary_asset: #Text //id in library + preview: #Text //id in library + experience: #Text// asset to use for the experience of the NFT(typically html) + hidden: #Text //asset to use for the hidden asset before it is minted + library: #Array(#Class({ + library_id: #text //must be unique + title: #text; + location_type; #Text //inCansiter, IPFS, URL, + location; #Text; //http addressable + content_type: #Text; + contentHash: #Bytes + size: #Nat; + sort: #Nat})); + __system: //cannot specify system vars on stage + status: #Text //minted, staged + current_sale_id: #Text //currently running or last run sale for the NFT + __app: + read: #Class{ + {{type: public;} + {type: roles; roles: #Array[#Text]} //nyi + {type: block; roles: #Array[#Principal]} //nyi + {type: allow; roles: #Array[#Principal]} + } + write: #Class{ + {{type: public;} + {type: roles; roles: #Array[#Text]} //nyi + {type: block; roles: #Array[#Principal]} //nyi + {type: allow; roles: #Array[#Principal]} + } + permissions: #Class{ + {type: roles; roles: #Array[#Text]} //nyi + {type: block; roles: #Array[#Principal]} //nyi + {type: allow; roles: #Array[#Principal]} + } + com.app*.data_item: #Class{ + read: #Class{ + {{type: public;} + {type: roles; roles: #Array[#Text]} + {type: block; roles: #Array[#Principal]} + {type: allow; roles: #Array[#Principal]} + } + write: #Class{ + {{type: public;} + {type: roles; roles: #Array[#Text]} + {type: block; roles: #Array[#Principal]} + {type: allow; roles: #Array[#Principal]} + } + } + compute_context: #Class //nyi + context_server: #Principal //nyi + context_menu: [#Class] //nyi + //content for html + owner: #Principal + { "name": "is_soulbound", "value": { "Bool": false },"immutable": false} + {"name":"default_royalty_primary", "value":{"Array":{ //royalties are assigned at the colletion level and then copied to each nft in the system vars. 
they become immutable except for the network + "thawed": [ + {"Class":[ + {"name":"tag", "value":{"Text":"com.origyn.royalty.broker"}, "immutable":true}, + {"name":"rate", "value":{"Float":0.05}, "immutable":true}, + {"name":"account", "value":{"Principal":"rrkah-fqaaa-aaaaa-aaaaq-cai"}, "immutable":false} + ]}, + {"Class":[ + {"name":"tag", "value":{"Text":"com.origyn.royalty.node"}, "immutable":true}, + {"name":"rate", "value":{"Float":0.005}, "immutable":true}, + {"name":"account", "value":{"Principal":"rrkah-fqaaa-aaaaa-aaaaq-cai"}, "immutable":false} + ]} + ] + }}, "immutable":false}, + {"name":"default_royalty_secondary", "value":{"Array":{ + "thawed": [ + {"Class":[ + {"name":"tag", "value":{"Text":"com.origyn.royalty.broker"}, "immutable":true}, + {"name":"rate", "value":{"Float":0.05}, "immutable":true}, + {"name":"account", "value":{"Principal":"rrkah-fqaaa-aaaaa-aaaaq-cai"}, "immutable":false} + ]}, + {"Class":[ + {"name":"tag", "value":{"Text":"com.origyn.royalty.node"}, "immutable":true}, + {"name":"rate", "value":{"Float":0.005}, "immutable":true}, + {"name":"account", "value":{"Principal":"rrkah-fqaaa-aaaaa-aaaaq-cai"}, "immutable":false} + ]}, + {"Class":[ + {"name":"tag", "value":{"Text":"com.origyn.royalty.originator"}, "immutable":true}, + {"name":"rate", "value":{"Float":0.05}, "immutable":true}, + {"name":"account", "value":{"Principal":"rrkah-fqaaa-aaaaa-aaaaq-cai"}, "immutable":false} + ]}, + {"Class":[ + {"name":"tag", "value":{"Text":"com.origyn.royalty.custom"}, "immutable":true}, + {"name":"rate", "value":{"Float":0.05}, "immutable":true}, + {"name":"account", "value":{"Principal":"rrkah-fqaaa-aaaaa-aaaaq-cai"}, "immutable":false} + ]} + ] + }}, "immutable":false}, + + } +``` + +* alternative mappings + * getMetaDataDip721() -> DIP721MetadataResult query //nyi + * metadataEXT(Text) -> ?Blob - supports metadata() for legacy support. - the collection properties should be converted to a blob standard that a client can decipher(cbor?/protobuf?); //will have to manage multi chunks //nyi + + +### Large NFT Assets + +``` +query chunk_nft_origyn(ChunkRequest = { + token_id: Text; + library_id: Text; + chunk: Nat; + }) query -> Result +``` + +returns chunk of bytes for a resource. #eof will be returned for the last chunk + +* alternative mappings + * DIP721 doesn't seem to currently support pulling chunks + * EXT doesn't seem to support retrieving more than the first chunk. + + +### Data API + +NFTs hold data inside of them on a per app basis. Data can be updated by those apps using the following function. Currently only replace actions are supported: + +``` + + public type NFTUpdateRequest ={ + #replace:{ + token_id: Text; + data: CandyTypes.CandyValue; + }; + #update:{// NYI + token_id: Text; + app_id: Text; + update: CandyTypes.UpdateRequest; + } + }; + +update_app_nft_origyn : shared NFTUpdateRequest -> async Result.Result; + + +read: public/allow/block/roles +write: public/allow/block/roles +permissions: allow/roles -> //perhaps permission changes should be subject to governance? + + + +``` + +We also provide http_request access to this data via the /info endpoint. To access restricted data the user must submit an access token in the query string. + +To get an access token the user can call the following funciton: + +``` + +http_access_key -> Result.Result + +``` + +The returned token can be appened to a url request with the ?access=TOKEN format to see restricted information + +**NOTE: Data stored on the IC should not be considered secure. 
It is possible(though not probable) that node operators could look at the data at rest and see access tokens. The only current method for hiding data from node providers is to encrypt the data before putting it into a canister. It is highly recommended that any personally identifiable information is encrypted before being stored on a canister with a separate and secure decryption system in place.** + + + + +## Ledger + +Transactions for each NFT are held in an NFT history ledger. The collection ledger is held at the token_id ""(empty string). + +``` + +public type TransactionRecord = { + token_id: Text; + index: Nat; + txn_type: { + #auction_bid : { + buyer: Account; + amount: Nat; + token: TokenSpec; + sale_id: Text; + extensible: CandyTypes.CandyValue; + }; + #mint : { + from: Account; + to: Account; + //nyi: metadata hash + sale: ?{token: TokenSpec; + amount: Nat; //Nat to support cycles + }; + extensible: CandyTypes.CandyValue; + }; + #sale_ended : { + seller: Account; + buyer: Account; + + token: TokenSpec; + sale_id: ?Text; + amount: Nat;//Nat to support cycles + extensible: CandyTypes.CandyValue; + }; + #royalty_paid : { + seller: Account; + buyer: Account; + reciever: Account; + tag: Text; + token: TokenSpec; + sale_id: ?Text; + amount: Nat;//Nat to support cycles + extensible: CandyTypes.CandyValue; + }; + #sale_opened : { + pricing: PricingConfig; + sale_id: Text; + extensible: CandyTypes.CandyValue; + }; + #owner_transfer : { + from: Account; + to: Account; + extensible: CandyTypes.CandyValue; + }; + #escrow_deposit : { + seller: Account; + buyer: Account; + token: TokenSpec; + token_id: Text; + amount: Nat;//Nat to support cycles + trx_id: TransactionID; + extensible: CandyTypes.CandyValue; + }; + #escrow_withdraw : { + seller: Account; + buyer: Account; + token: TokenSpec; + token_id: Text; + amount: Nat;//Nat to support cycles + fee: Nat; + trx_id: TransactionID; + extensible: CandyTypes.CandyValue; + }; + #sale_withdraw : { + seller: Account; + buyer: Account; + token: TokenSpec; + token_id: Text; + amount: Nat; //Nat to support cycles + fee: Nat; + trx_id: TransactionID; + extensible: CandyTypes.CandyValue; + }; + #canister_owner_updated : { + owner: Principal; + extensible: CandyTypes.CandyValue; + }; + #canister_managers_updated : { + managers: [Principal]; + extensible: CandyTypes.CandyValue; + }; + #canister_network_updated : { + network: Principal; + extensible: CandyTypes.CandyValue; + }; + #data; //nyi + #burn; + #extensible : CandyTypes.CandyValue; + + }; + timestamp: Int; + }; + + + history_nft_origyn : shared query (Text, ?Nat, ?Nat) -> async Result.Result<[TransactionRecord],OrigynError>; + + +``` + +## Sales + +Sales are created using the market_transfer_nft_origyn function. + +Sales are managed through the sale_nft_origyn function and information can be retrieved via query using the sale_info_nft_origyn function. Both methods have a _batch method for multiple requests and the sale_info query has a secure endpoint as well. 
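The full request and response variants are listed below. As a hedged sketch (in Motoko) of the happy path for a bidder, with placeholder identifiers, no error handling, and the actor reference `nft` assumed to come from the generated declarations, the calls chain together roughly like this:

```
// 1. Ask for an invoice-like deposit account for the caller.
let deposit_info = await nft.sale_info_nft_origyn(#deposit_info(null));

// 2. Send the bid amount + one ledger fee to that subaccount on the token
//    ledger, then claim it as an escrow tied to the sale's token_id and seller.
let escrow = await nft.sale_nft_origyn(#escrow_deposit(escrow_request));

// 3. Bid using the receipt returned by the escrow deposit.
let bid = await nft.sale_nft_origyn(#bid({
  escrow_receipt = my_escrow_receipt; // from the #escrow_deposit response
  sale_id = current_sale_id;
  broker_id = null;
}));

// 4. Once the auction has ended, anyone may settle it.
let closed = await nft.sale_nft_origyn(#end_sale(token_id));
```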
+
+
+```
+    public type ManageSaleRequest = {
+        #end_sale : Text; //token_id
+        #open_sale: Text; //token_id;
+        #escrow_deposit: EscrowRequest;
+        #refresh_offers: ?Account;
+        #bid: BidRequest;
+        #withdraw: WithdrawRequest;
+    };
+
+    public type ManageSaleResponse = {
+        #end_sale : EndSaleResponse; //trx record if successful
+        #open_sale: Bool; //true if opened, false if not;
+        #escrow_deposit: EscrowResponse;
+        #refresh_offers: [EscrowRecord];
+        #bid: BidResponse;
+        #withdraw: WithdrawResponse;
+    };
+
+    sale_nft_origyn : shared ManageSaleRequest -> async Result.Result;
+
+    public type SaleInfoRequest = {
+        #active : ?(Nat, Nat); //get a list of active sales
+        #history : ?(Nat, Nat); //skip, take
+        #status : Text; //saleID
+        #deposit_info : ?Account;
+    };
+
+    public type SaleInfoResponse = {
+        #active: {
+            records: [(Text, ?SaleStatusStable)];
+            eof: Bool;
+            count: Nat};
+        #history : {
+            records: [?SaleStatusStable];
+            eof: Bool;
+            count : Nat};
+        #status: ?SaleStatusStable;
+        #deposit_info: SubAccountInfo;
+    };
+
+    sale_info_nft_origyn : shared SaleInfoRequest -> async Result.Result;
+
+```
+
+### Escrow
+
+All transactions currently require an escrow from the receiving party. Appraised transfers will be supported in the future, where node providers can pay the royalties and collect them out of band.
+
+```
+
+To make a deposit the user must first ask for the deposit info (like an invoice). If the Account is null then the info is returned for the caller. Enough tokens must be sent to cover the deposit + 1 transaction fee.
+
+    public type SubAccountInfo = {
+      principal : Principal;
+      account_id : Blob;
+      account_id_text: Text;
+      account: {
+        principal: Principal;
+        sub_account: Blob;
+      };
+    };
+
+sale_info_nft_origyn(#deposit_info(?Account)) -> async Result.Result;
+
+Once the tokens are sent to the subaccount on the NFT canister, the following is called to claim the deposit:
+
+    public type EscrowRequest = {
+        token_id : Text; //empty string for general escrow
+        deposit : DepositDetail;
+        lock_to_date: ?Int; //timestamp to lock escrow until.
+    };
+
+    public type DepositDetail = {
+        token : TokenSpec;
+        seller: Account;
+        buyer : Account;
+        amount: Nat; //Nat to support cycles;
+        sale_id: ?Text;
+        lock_to_date: ?Int;
+        trx_id : ?TransactionID; //null for account based ledgers
+    };
+
+    public type EscrowReceipt = {
+        amount: Nat; //Nat to support cycles
+        seller: Account;
+        buyer: Account;
+        token_id: Text;
+        token: TokenSpec;
+    };
+
+    public type EscrowResponse = {
+        receipt: EscrowReceipt;
+        balance: Nat; //total balance if an existing escrow was added to
+        transaction: TransactionRecord;
+    };
+
+    sale_nft_origyn(#escrow_deposit(EscrowRequest)) -> Result.Result
+```
+
+### Bids
+
+During auctions, a user can bid on the NFT using the #bid command.
+
+```
+    public type BidRequest = {
+        escrow_receipt: EscrowReceipt; //should be returned by the escrow_deposit
+        sale_id: Text;
+        broker_id: ?Principal;
+    };
+
+    public type BidResponse = TransactionRecord;
+
+
+    sale_nft_origyn(ManageSaleRequest(#bid(BidRequest))) -> Result - places a bid on an NFT if the escrow receipt is valid.
+
+
+```
+
+### Withdrawals
+
+Allows a user to withdraw their escrowed funds from either an escrow account or a sales receipt, or to reject an offer and return the funds to the offerer.
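The request types are listed just below. As a rough example (assuming those types are in scope, using the mainnet ICP ledger as the token spec, and treating `buyer`, `seller`, and `nft` as placeholders), an escrow withdrawal might be built and submitted like this:

```
// Pull an escrowed 1 ICP for "my-nft-1" back to the buyer's own wallet.
let icp_ledger : TokenSpec = #ic({
  canister = Principal.fromText("ryjl3-tyaaa-aaaaa-aaaba-cai"); // ICP ledger
  fee = 10_000;       // ledger fee in e8s
  symbol = "ICP";
  decimals = 8;
  standard = #Ledger;
});

let withdraw : WithdrawRequest = #escrow({
  buyer = #principal(buyer);
  seller = #principal(seller);
  token_id = "my-nft-1";
  token = icp_ledger;
  amount = 100_000_000;            // 1 ICP in e8s
  withdraw_to = #principal(buyer);
});

// On success the response is #withdraw(TransactionRecord) recording the escrow_withdraw.
let result = await nft.sale_nft_origyn(#withdraw(withdraw));
```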
+
+```
+    public type WithdrawRequest = {
+        #escrow: WithdrawDescription;
+        #sale: WithdrawDescription;
+        #reject: RejectDescription;
+    };
+
+    public type WithdrawDescription = {
+        buyer: Account;
+        seller: Account;
+        token_id: Text;
+        token: TokenSpec;
+        amount: Nat;
+        withdraw_to : Account;
+    };
+
+    public type RejectDescription = {
+        buyer: Account;
+        seller: Account;
+        token_id: Text;
+        token: TokenSpec;
+    };
+
+    sale_nft_origyn(#withdraw(WithdrawRequest)) -> Result.Result - request a refund of a deposit if possible
+
+```
+
+### Auction Management
+
+Ending an auction awards the NFT to the winner and the sale price to the seller.
+
+```
+
+    sale_nft_origyn(#open_sale(token_id)) -> async Bool - opens a sale if possible; this happens automatically if a bid is made after the sale's start_date.
+
+    sale_nft_origyn(#end_sale(token_id)) -> ManageSaleResponse(#end_sale : TransactionRecord) - ends a sale if possible and transfers the token if possible
+```
+
+Refreshing Offers - the offers collection can become stale. The following function can be used to refresh the offers collection and make sure that the offers returned by balance_of_nft_origyn are fresh and still active:
+
+
+```
+
+    sale_nft_origyn(#refresh_offers(?Account)) -> #refresh_offers: [EscrowRecord] - refreshes the offers and returns the list
+
+```
+
+
+
+## Collection Info
+
+Passing null to the following function will get you the current information about the collection. Individual field requests and pagination will be added in a future release.
+
+```
+    collection_nft_origyn : (fields : ?[(Text, ?Nat, ?Nat)]) -> async Result.Result
+
+    public type CollectionInfo = {
+        fields: ?[(Text, ?Nat, ?Nat)];
+        logo: ?Text;
+        name: ?Text;
+        symbol: ?Text;
+        total_supply: ?Nat;
+        owner: ?Principal;
+        managers: ?[Principal];
+        network: ?Principal;
+        token_ids: ?[Text];
+        token_ids_count: ?Nat;
+        multi_canister: ?[Principal];
+        multi_canister_count: ?Nat;
+        metadata: ?CandyTypes.CandyValue;
+        allocated_storage : ?Nat;
+        available_space : ?Nat;
+    };
+
+```
+
+Collection updates are handled with collection_update_nft_origyn:
+
+
+```
+    public type ManageCollectionCommand = {
+        #UpdateManagers : [Principal];
+        #UpdateOwner : Principal;
+        #UpdateNetwork : ?Principal;
+        #UpdateLogo : ?Text;
+        #UpdateName : ?Text;
+        #UpdateSymbol : ?Text;
+        #UpdateMetadata: (Text, ?CandyTypes.CandyValue, Bool);
+    };
+
+
+    collection_update_nft_origyn : (ManageCollectionCommand) -> async Result.Result;
+    collection_update_batch_nft_origyn : ([ManageCollectionCommand]) -> async [Result.Result];
+
+```
+
+Collections can have their storage increased by manually adding storage canisters to the storage array:
+
+```
+
+public type ManageStorageRequest = {
+        #add_storage_canisters : [(Principal, Nat, (Nat, Nat, Nat))]; //(principal of the storage canister, space, canister version)
+    };
+
+manage_storage_nft_origyn : shared ManageStorageRequest -> async Result.Result;
+
+
+```
+
+Storage info can be pulled with the calls below:
+
+```
+
+    public type StorageMetrics = {
+        allocated_storage: Nat;
+        available_space: Nat;
+        allocations: [AllocationRecordStable];
+    };
+
+    public type AllocationRecordStable = {
+        canister : Principal;
+        allocated_space: Nat;
+        available_space: Nat;
+        chunks: [Nat];
+        token_id: Text;
+        library_id: Text;
+    };
+
+    query storage_info_nft_origyn() : async Result.Result
+    storage_info_secure_nft_origyn() : async Result.Result
+
+```
+
+
+## Extensibility
+
+Returns the supported interfaces for the NFT canister:
+
+```
+__supports() query -> [Text]
+    [
+        ("nft_origyn","v0.1.0"),
("data_nft_origyn","v0.1.0"), + ("collection_nft_origyn","v0.1.0"), + ("mint_nft_origyn","v0.1.0"), + ("owner_nft_origyn","v0.1.0"), + ("market_nft_origyn","v0.1.0")] + +``` + + + +``` +Features: + nft_origyn + history_nft_origyn + mint_nft_origyn + burn_nft_origyn + notify_nft_origyn +``` + +### nft_origyn + nft_origyn(Text) query -> async Result.Result + nft_secure_origyn(token_id : Text) : async Result.Result + nft_batch_origyn(Text) query -> async [Result.Result] + nft_batch_secure_origyn(token_ids : [Text]) : async [Result.Result] + chunk_nft_origyn(request : Types.ChunkRequest) query -> async Result.Result + balance_of_nft_origyn(account: Types.Account) query -> async Result.Result + balance_of_secure_nft_origyn(account: Types.Account) : async Result.Result + bearer_nft_origyn(Text) query -> async Result.Result + bearer_secure_nft_origyn(token_id : Text) : async Result.Result + bearer_batch_nft_origyn([Text]) query -> async [Result.Result] + bearer_batch_secure_nft_origyn([Text]) query -> async [Result.Result] + get_token_id_as_nat_origyn(Text) query -> async Nat + get_nat_as_token_id_origyn(Nat) query -> async Text + + //standard support + balanceOfDip721(user: Principal) query -> async Nat + balance(request: EXT.BalanceRequest) query -> async EXT.BalanceResponse + balanceEXT(request: EXT.BalanceRequest) query -> async EXT.BalanceResponse + getEXTTokenIdentifier query -> async Text + ownerOfDIP721(tokenAsNat: Nat) query -> async DIP721.OwnerOfResponse + ownerOf(tokenAsNat: Nat) query -> async DIP721.OwnerOfResponse + bearerEXT(tokenIdentifier: EXT.TokenIdentifier) query-> async Result.Result + bearer(tokenIdentifier: EXT.TokenIdentifier) query -> async Result.Result + metadata(token : EXT.TokenIdentifier) query : async Result.Result + +### history_nft_origyn + history_nft_origyn (token_id : Text, start: ?Nat, end: ?Nat) query -> async Result.Result<[Types.TransactionRecord],Types.OrigynError> + +### data_nft_origyn + update_app_nft_origyn : (Types.NFTUpdateRequest) -> async Result.Result + +### collection_nft_origyn + collection_update_origyn(Types.CollectionUpdateRequest) -> async Result.Result + manage_storage_nft_origyn(Types.ManageStorageRequest)->async Result.Result + collection_nft_origyn(fields : ?[(Text,?Nat, ?Nat)]) query -> async Result.Result + storage_info_nft_origyn : shared query () -> async Result.Result + storage_info_secure_nft_origyn() : async Result.Result + +### mint_nft_origyn + stage_nft_origyn({metadata: CandyTypes.CandyValue}) -> async Result.Result + stage_batch_nft_origyn([{metadata: CandyTypes.CandyValue}]) -> async [Result.Result] + stage_library_nft_origyn(Types.StageChunkArg)-> async Result.Result + mint_nft_origyn(token_id: Text, new_owner : Types.Account) -> async Result.Result + mint_batch_nft_origyn([(Text, Types.Account)]) -> async [Result.Result] + +### owner_nft_origyn + share_wallet_nft_origyn(Types.ShareWalletRequest) -> async Result.Result + +### market_nft_origyn + sale_nft_origyn : shared ManageSaleRequest -> async Result.Result; + sale_info_nft_origyn : shared SaleInfoRequest -> async Result.Result; + market_transfer_nft_origyn ( Types.MarketTransferRequest) -> async Result.Result + market_transfer_batch_nft_origyn ( [Types.MarketTransferRequest]) -> async [Result.Result] + transferDip721(from: Principal, to: Principal, tokenAsNat: Nat) -> async DIP721.Result + transferFromDip721(from: Principal, to: Principal, tokenAsNat: Nat) -> async DIP721.Result + transferFrom(from: Principal, to: Principal, tokenAsNat: Nat) -> async DIP721.Result + 
transferEXT(request: EXT.TransferRequest) -> async EXT.TransferResponse + transfer(request: EXT.TransferRequest) -> async EXT.TransferResponse + + +## Types + +``` + + // migration state + + public type CollectionData = { + var logo: ?Text; + var name: ?Text; + var symbol: ?Text; + var metadata: ?CandyTypes.CandyValue; + var owner : Principal; + var managers: [Principal]; + var network: ?Principal; + var allocated_storage: Nat; + var available_space : Nat; + var active_bucket: ?Principal; + }; + + public type BucketData = { + principal : Principal; + var allocated_space: Nat; + var available_space: Nat; + date_added: Int; + b_gateway: Bool; + var version: (Nat, Nat, Nat); + var allocations: Map.Map<(Text,Text), Int>; // (token_id, library_id), Timestamp + }; + + public type AllocationRecord = { + canister : Principal; + allocated_space: Nat; + var available_space: Nat; + var chunks: SB.StableBuffer; + token_id: Text; + library_id: Text; + }; + + public type LogEntry = { + event : Text; + timestamp: Int; + data: CandyTypes.CandyValue; + caller: ?Principal; + }; + + public type SalesSellerTrie = Map.Map>>>; + + + public type SalesBuyerTrie = Map.Map>>; + + public type EscrowBuyerTrie = Map.Map>>>; + + public type EscrowSellerTrie = Map.Map>>; + + public type EscrowTokenIDTrie = Map.Map>; + + public type EscrowLedgerTrie = Map.Map; + + public type Account = { + #principal : Principal; + #account : {owner: Principal; sub_account: ?Blob}; + #account_id : Text; + #extensible : CandyTypes.CandyValue; + }; + + public type EscrowRecord = { + amount: Nat; + buyer: Account; + seller:Account; + token_id: Text; + token: TokenSpec; + sale_id: ?Text; //locks the escrow to a specific sale + lock_to_date: ?Int; //locks the escrow to a timestamp + account_hash: ?Blob; //sub account the host holds the funds in + }; + + public type TokenSpec = { + #ic: ICTokenSpec; + #extensible : CandyTypes.CandyValue; //#Class + }; + + public type ICTokenSpec = { + canister: Principal; + fee: Nat; + symbol: Text; + decimals: Nat; + standard: { + #DIP20; + #Ledger; + #EXTFungible; + #ICRC1; + }; + }; + + public type PricingConfig = { + #instant; //executes an escrow recipt transfer -only available for non-marketable NFTs + #flat: { + token: TokenSpec; + amount: Nat; //Nat to support cycles + }; + //below have not been signficantly desinged or vetted + #dutch: { + start_price: Nat; + decay_per_hour: Float; + reserve: ?Nat; + }; + #auction: AuctionConfig; + #extensible:{ + #candyClass + } + }; + + public type AuctionConfig = { + reserve: ?Nat; + token: TokenSpec; + buy_now: ?Nat; + start_price: Nat; + start_date: Int; + ending: { + #date: Int; + #waitForQuiet: { + date: Int; + extention: Nat64; + fade: Float; + max: Nat + }; + }; + min_increase: { + #percentage: Float; + #amount: Nat; + }; + allow_list : ?[Principal]; + }; + + public type TransactionRecord = { + token_id: Text; + index: Nat; + txn_type: { + #auction_bid : { + buyer: Account; + amount: Nat; + token: TokenSpec; + sale_id: Text; + extensible: CandyTypes.CandyValue; + }; + #mint : { + from: Account; + to: Account; + //nyi: metadata hash + sale: ?{token: TokenSpec; + amount: Nat; //Nat to support cycles + }; + extensible: CandyTypes.CandyValue; + }; + #sale_ended : { + seller: Account; + buyer: Account; + + token: TokenSpec; + sale_id: ?Text; + amount: Nat;//Nat to support cycles + extensible: CandyTypes.CandyValue; + }; + #royalty_paid : { + seller: Account; + buyer: Account; + reciever: Account; + tag: Text; + token: TokenSpec; + sale_id: ?Text; + amount: 
Nat;//Nat to support cycles + extensible: CandyTypes.CandyValue; + }; + #sale_opened : { + pricing: PricingConfig; + sale_id: Text; + extensible: CandyTypes.CandyValue; + }; + #owner_transfer : { + from: Account; + to: Account; + extensible: CandyTypes.CandyValue; + }; + #escrow_deposit : { + seller: Account; + buyer: Account; + token: TokenSpec; + token_id: Text; + amount: Nat;//Nat to support cycles + trx_id: TransactionID; + extensible: CandyTypes.CandyValue; + }; + #escrow_withdraw : { + seller: Account; + buyer: Account; + token: TokenSpec; + token_id: Text; + amount: Nat;//Nat to support cycles + fee: Nat; + trx_id: TransactionID; + extensible: CandyTypes.CandyValue; + }; + #sale_withdraw : { + seller: Account; + buyer: Account; + token: TokenSpec; + token_id: Text; + amount: Nat; //Nat to support cycles + fee: Nat; + trx_id: TransactionID; + extensible: CandyTypes.CandyValue; + }; + #canister_owner_updated : { + owner: Principal; + extensible: CandyTypes.CandyValue; + }; + #canister_managers_updated : { + managers: [Principal]; + extensible: CandyTypes.CandyValue; + }; + #canister_network_updated : { + network: Principal; + extensible: CandyTypes.CandyValue; + }; + #data; //nyi + #burn; + #extensible : CandyTypes.CandyValue; + + }; + timestamp: Int; + }; + + //used to identify the transaction in a remote ledger; usually a nat on the IC + public type TransactionID = { + #nat : Nat; + #text : Text; + #extensible : CandyTypes.CandyValue + }; + + public type SaleStatus = { + sale_id: Text; //sha256?; + original_broker_id: ?Principal; + broker_id: ?Principal; + token_id: Text; + sale_type: { + #auction: AuctionState; + }; + }; + + public type EscrowReceipt = { + amount: Nat; //Nat to support cycles + seller: Account; + buyer: Account; + token_id: Text; + token: TokenSpec; + + }; + + public type AuctionState = { + config: PricingConfig; + var current_bid_amount: Nat; + var current_broker_id: ?Principal; + var end_date: Int; + var min_next_bid: Nat; + var current_escrow: ?EscrowReceipt; + var wait_for_quiet_count: ?Nat; + var allow_list: ?Map.Map; //empty set means everyone + var participants: Map.Map; + var status: { + #open; + #closed; + #not_started; + }; + var winner: ?Account; + }; + + + //types + + public type InitArgs = { + owner: Principal.Principal; + storage_space: ?Nat; + }; + + public type StorageInitArgs = { + gateway_canister: Principal; + network: ?Principal; + storage_space: ?Nat; + }; + + public type StorageMigrationArgs = { + gateway_canister: Principal; + network: ?Principal; + storage_space: ?Nat; + caller: Principal; + }; + + public type ManageCollectionCommand = { + #UpdateManagers : [Principal]; + #UpdateOwner : Principal; + #UpdateNetwork : ?Principal; + #UpdateLogo : ?Text; + #UpdateName : ?Text; + #UpdateSymbol : ?Text; + #UpdateMetadata: (Text, ?CandyTypes.CandyValue, Bool); + }; + + // RawData type is a tuple of Timestamp, Data, and Principal + public type RawData = (Int, Blob, Principal); + + public type HttpRequest = { + body: Blob; + headers: [HeaderField]; + method: Text; + url: Text; + }; + + public type StreamingCallbackToken = { + content_encoding: Text; + index: Nat; + key: Text; + //sha256: ?Blob; + }; + public type StreamingCallbackHttpResponse = { + body: Blob; + token: ?StreamingCallbackToken; + }; + public type ChunkId = Nat; + public type SetAssetContentArguments = { + chunk_ids: [ChunkId]; + content_encoding: Text; + key: Key; + sha256: ?Blob; + }; + public type Path = Text; + public type Key = Text; + + public type HttpResponse = { + body: Blob; 
+ headers: [HeaderField]; + status_code: Nat16; + streaming_strategy: ?StreamingStrategy; + }; + + public type StreamingStrategy = { + #Callback: { + callback: shared () -> async (); + token: StreamingCallbackToken; + }; + }; + + public type HeaderField = (Text, Text); + + public type canister_id = Principal; + + public type definite_canister_settings = { + freezing_threshold : Nat; + controllers : ?[Principal]; + memory_allocation : Nat; + compute_allocation : Nat; + }; + + public type canister_status = { + status : { #stopped; #stopping; #running }; + memory_size : Nat; + cycles : Nat; + settings : definite_canister_settings; + module_hash : ?[Nat8]; + }; + + public type IC = actor { + canister_status : { canister_id : canister_id } -> async canister_status; + }; + + public type StageChunkArg = { + token_id: Text; + library_id: Text; + filedata: CandyTypes.CandyValue; + chunk: Nat; + content: Blob; + }; + + + public type ChunkRequest = { + token_id: Text; + library_id: Text; + chunk: ?Nat; + }; + + public type ChunkContent = { + #remote : { + canister: Principal; + args: ChunkRequest; + }; + #chunk : { + content: Blob; + total_chunks: Nat; + current_chunk: ?Nat; + storage_allocation: AllocationRecordStable; + }; + }; + + public type MarketTransferRequest = { + token_id: Text; + sales_config: SalesConfig; + }; + + public type OwnerTransferResponse = { + transaction: TransactionRecord; + assets: [CandyTypes.CandyValue]; + }; + + public type ShareWalletRequest = { + token_id: Text; + from: Account; + to: Account; + }; + + public type SalesConfig = { + escrow_receipt : ?EscrowReceipt; + broker_id : ?Principal; + pricing: PricingConfig; + }; + + public type ICTokenSpec = MigrationTypes.Current.ICTokenSpec; + + public type TokenSpec = MigrationTypes.Current.TokenSpec; + + public let TokenSpecDefault = #extensible(#Empty); + + + //nyi: anywhere a deposit address is used, check blob for size in inspect message + public type SubAccountInfo = { + principal : Principal; + account_id : Blob; + account_id_text: Text; + account: { + principal: Principal; + sub_account: Blob; + }; + }; + + public type EscrowReceipt = MigrationTypes.Current.EscrowReceipt; + + public type EscrowRequest = { + token_id : Text; //empty string for general escrow + deposit : DepositDetail; + lock_to_date: ?Int; //timestamp to lock escrow until. 
+ }; + + public type DepositDetail = { + token : TokenSpec; + seller: Account; + buyer : Account; + amount: Nat; //Nat to support cycles; + sale_id: ?Text; + trx_id : ?TransactionID; //null for account based ledgers + }; + + //used to identify the transaction in a remote ledger; usually a nat on the IC + public type TransactionID = MigrationTypes.Current.TransactionID; + + public type EscrowResponse = { + receipt: EscrowReceipt; + balance: Nat; + transaction: TransactionRecord; + }; + + public type BidRequest = { + escrow_receipt: EscrowReceipt; + sale_id: Text; + broker_id: ?Principal; + }; + + public type BidResponse = TransactionRecord; + + public type PricingConfig = MigrationTypes.Current.PricingConfig; + + public type AuctionConfig = MigrationTypes.Current.AuctionConfig; + + + public let AuctionConfigDefault = { + reserve = null; + token = TokenSpecDefault; + buy_now = null; + start_price = 0; + start_date = 0; + ending = #date(0); + min_increase = #amount(0); + }; + + public type NFTInfoStable = { + current_sale : ?SaleStatusStable; + metadata : CandyTypes.CandyValue; + }; + + + + public type AuctionState = MigrationTypes.Current.AuctionState; + + + public type SaleStatus = MigrationTypes.Current.SaleStatus; + + public type SaleStatusStable = { + sale_id: Text; //sha256?; + original_broker_id: ?Principal; + broker_id: ?Principal; + token_id: Text; + sale_type: { + #auction: AuctionStateStable; + }; + }; + + public type MarketTransferRequestReponse = TransactionRecord; + + public type Account = MigrationTypes.Current.Account; + + public type HttpAccess= { + identity: Principal; + expires: Time.Time; + }; + + + + public type StorageMetrics = { + allocated_storage: Nat; + available_space: Nat; + allocations: [AllocationRecordStable]; + }; + + + public type BucketData = { + principal : Principal; + var allocated_space: Nat; + var available_space: Nat; + date_added: Int; + b_gateway: Bool; + var version: (Nat, Nat, Nat); + var allocations: Map.Map<(Text,Text), Int>; // (token_id, library_id), Timestamp + }; + + public type AllocationRecord = { + canister : Principal; + allocated_space: Nat; + var available_space: Nat; + var chunks: SB.StableBuffer; + token_id: Text; + library_id: Text; + }; + + public type AllocationRecordStable = { + canister : Principal; + allocated_space: Nat; + available_space: Nat; + chunks: [Nat]; + token_id: Text; + library_id: Text; + }; + + public func allocation_record_stabalize(item:AllocationRecord) : AllocationRecordStable{ + {canister = item.canister; + allocated_space = item.allocated_space; + available_space = item.available_space; + chunks = SB.toArray(item.chunks); + token_id = item.token_id; + library_id = item. 
library_id;} + }; + + public type TransactionRecord = MigrationTypes.Current.TransactionRecord; + + public type NFTUpdateRequest ={ + #replace:{ + token_id: Text; + data: CandyTypes.CandyValue; + }; + #update:{ + token_id: Text; + app_id: Text; + update: CandyTypes.UpdateRequest; + } + }; + + public type NFTUpdateResponse = Bool; + + public type EndSaleResponse = TransactionRecord; + + public type EscrowRecord = MigrationTypes.Current.EscrowRecord; + + public type ManageSaleRequest = { + #end_sale : Text; //token_id + #open_sale: Text; //token_id; + #escrow_deposit: EscrowRequest; + #refresh_offers: ?Account; + #bid: BidRequest; + #withdraw: WithdrawRequest; + }; + + public type ManageSaleResponse = { + #end_sale : EndSaleResponse; //trx record if succesful + #open_sale: Bool; //true if opened, false if not; + #escrow_deposit: EscrowResponse; + #refresh_offers: [EscrowRecord]; + #bid: BidResponse; + #withdraw: WithdrawResponse; + }; + + public type SaleInfoRequest = { + #active : ?(Nat, Nat); //get al list of active sales + #history : ?(Nat, Nat); //skip, take + #status : Text; //saleID + #deposit_info : ?Account; + }; + + public type SaleInfoResponse = { + #active: { + records: [(Text, ?SaleStatusStable)]; + eof: Bool; + count: Nat}; + #history : { + records: [?SaleStatusStable]; + eof: Bool; + count : Nat}; + #status: ?SaleStatusStable; + #deposit_info: SubAccountInfo; + }; + + + + public type StakeRecord = {amount: Nat; staker: Account; token_id: Text;}; + + public type BalanceResponse = { + multi_canister: ?[Principal]; + nfts: [Text]; + escrow: [EscrowRecord]; + sales: [EscrowRecord]; + stake: [StakeRecord]; + offers: [EscrowRecord]; + }; + + public type LocalStageLibraryResponse = { + #stage_remote : { + allocation :AllocationRecord; + metadata: CandyTypes.CandyValue; + }; + #staged : Principal; + }; + + public type StageLibraryResponse = { + canister: Principal; + }; + + public type WithdrawDescription = { + buyer: Account; + seller: Account; + token_id: Text; + token: TokenSpec; + amount: Nat; + withdraw_to : Account; + }; + + public type RejectDescription = { + buyer: Account; + seller: Account; + token_id: Text; + token: TokenSpec; + }; + + public type WithdrawRequest = { + #escrow: WithdrawDescription; + #sale: WithdrawDescription; + #reject:RejectDescription; + }; + + + public type WithdrawResponse = TransactionRecord; + + public type CollectionInfo = { + fields: ?[(Text, ?Nat, ?Nat)]; + logo: ?Text; + name: ?Text; + symbol: ?Text; + total_supply: ?Nat; + owner: ?Principal; + managers: ?[Principal]; + network: ?Principal; + token_ids: ?[Text]; + token_ids_count: ?Nat; + multi_canister: ?[Principal]; + multi_canister_count: ?Nat; + metadata: ?CandyTypes.CandyValue; + allocated_storage : ?Nat; + available_space : ?Nat; + }; + + public type CollectionData = { + var logo: ?Text; + var name: ?Text; + var symbol: ?Text; + var metadata: ?CandyTypes.CandyValue; + var owner : Principal; + var managers: [Principal]; + var network: ?Principal; + var allocated_storage: Nat; + var available_space : Nat; + var active_bucket: ?Principal; + }; + + public type CollectionDataForStorage = { + + var owner : Principal; + var managers: [Principal]; + var network: ?Principal; + + }; + + public type ManageStorageRequest = { + #add_storage_canisters : [(Principal, Nat, (Nat, Nat, Nat))]; + }; + + public type ManageStorageResponse = { + #add_storage_canisters : (Nat,Nat);//space allocated, space available + }; + + public type LogEntry = { + event : Text; + timestamp: Int; + data: CandyTypes.CandyValue; 
+ caller: ?Principal; + }; + + public type OrigynError = {number : Nat32; text: Text; error: Errors; flag_point: Text;}; + + public type Errors = { + #app_id_not_found; + #asset_mismatch; + #attempt_to_stage_system_data; + #auction_ended; + #auction_not_started; + #bid_too_low; + #cannot_find_status_in_metadata; + #cannot_restage_minted_token; + #content_not_deserializable; + #content_not_found; + #deposit_burned; + #escrow_cannot_be_removed; + #escrow_owner_not_the_owner; + #escrow_withdraw_payment_failed; + #existing_sale_found; + #id_not_found_in_metadata; + #improper_interface; + #item_already_minted; + #item_not_owned; + #library_not_found; + #malformed_metadata; + #no_escrow_found; + #not_enough_storage; + #out_of_range; + #owner_not_found; + #property_not_found; + #receipt_data_mismatch; + #sale_not_found; + #sale_not_over; + #sale_id_does_not_match; + #sales_withdraw_payment_failed; + #storage_configuration_error; + #token_not_found; + #token_id_mismatch; + #token_non_transferable; + #unauthorized_access; + #unreachable; + #update_class_error; + #validate_deposit_failed; + #validate_deposit_wrong_amount; + #validate_deposit_wrong_buyer; + #validate_trx_wrong_host; + #withdraw_too_large; + #nyi; + + }; + + public type HTTPResponse = { + body : Blob; + headers : [HeaderField]; + status_code : Nat16; + streaming_strategy : ?StreamingStrategy; + }; + + + + public type StreamingCallback = query (StreamingCallbackToken) -> async (StreamingCallbackResponse); + + + + public type StreamingCallbackResponse = { + body : Blob; + token : ?StreamingCallbackToken; + }; + + public type StorageService = actor{ + stage_library_nft_origyn : shared (StageChunkArg, AllocationRecordStable, CandyTypes.CandyValue) -> async Result.Result; + storage_info_nft_origyn : shared query () -> async Result.Result; + chunk_nft_origyn : shared query ChunkRequest -> async Result.Result; + refresh_metadata_nft_origyn : (token_id: Text, metadata: CandyTypes.CandyValue) -> async Result.Result + }; + + public type Service = actor { + __advance_time : shared Int -> async Int; + __set_time_mode : shared { #test; #standard } -> async Bool; + balance : shared query EXT.BalanceRequest -> async BalanceResponse; + balanceEXT : shared query EXT.BalanceRequest -> async BalanceResponse; + balanceOfDip721 : shared query Principal -> async Nat; + balance_of_nft_origyn : shared query Account -> async Result.Result; + bearer : shared query EXT.TokenIdentifier -> async Result.Result; + bearerEXT : shared query EXT.TokenIdentifier -> async Result.Result; + bearer_nft_origyn : shared query Text -> async Result.Result; + bearer_batch_nft_origyn : shared query [Text] -> async [Result.Result]; + bearer_secure_nft_origyn : shared Text -> async Result.Result; + bearer_batch_secure_nft_origyn : shared [Text] -> async [Result.Result]; + canister_status : shared { + canister_id : canister_id; + } -> async canister_status; + collection_nft_origyn : (fields : ?[(Text, ?Nat, ?Nat)]) -> async Result.Result; + collection_update_nft_origyn : (ManageCollectionCommand) -> async Result.Result; + collection_update_batch_nft_origyn : ([ManageCollectionCommand]) -> async [Result.Result]; + cycles : shared query () -> async Nat; + getEXTTokenIdentifier : shared query Text -> async Text; + get_nat_as_token_id : shared query Nat -> async Text; + get_token_id_as_nat : shared query Text -> async Nat; + http_request : shared query HttpRequest -> async HTTPResponse; + http_request_streaming_callback : shared query StreamingCallbackToken -> async 
StreamingCallbackResponse; + manage_storage_nft_origyn : shared ManageStorageRequest -> async Result.Result; + market_transfer_nft_origyn : shared MarketTransferRequest -> async Result.Result; + market_transfer_batch_nft_origyn : shared [MarketTransferRequest] -> async [Result.Result]; + mint_nft_origyn : shared (Text, Account) -> async Result.Result; + nftStreamingCallback : shared query StreamingCallbackToken -> async StreamingCallbackResponse; + chunk_nft_origyn : shared query ChunkRequest -> async Result.Result; + history_nft_origyn : shared query (Text, ?Nat, ?Nat) -> async Result.Result<[TransactionRecord],OrigynError>; + nft_origyn : shared query Text -> async Result.Result; + update_app_nft_origyn : shared NFTUpdateRequest -> async Result.Result; + ownerOf : shared query Nat -> async DIP721.OwnerOfResponse; + ownerOfDIP721 : shared query Nat -> async DIP721.OwnerOfResponse; + share_wallet_nft_origyn : shared ShareWalletRequest -> async Result.Result; + sale_nft_origyn : shared ManageSaleRequest -> async Result.Result; + sale_info_nft_origyn : shared SaleInfoRequest -> async Result.Result; + stage_library_nft_origyn : shared StageChunkArg -> async Result.Result; + stage_nft_origyn : shared { metadata : CandyTypes.CandyValue } -> async Result.Result; + storage_info_nft_origyn : shared query () -> async Result.Result; + transfer : shared EXT.TransferRequest -> async EXT.TransferResponse; + transferEXT : shared EXT.TransferRequest -> async EXT.TransferResponse; + transferFrom : shared (Principal, Principal, Nat) -> async DIP721.Result; + transferFromDip721 : shared (Principal, Principal, Nat) -> async DIP721.Result; + whoami : shared query () -> async Principal; + }; + + +//DIP721 Types + + type DIP721OwnerResult = { + #Err: DIP721ApiError; + #Ok: Principal; + }; + + type DIP721APIError = { + #Unauthorized; + #InvalidTokenId; + #ZeroAddress; + #Other; + }; + + type TxReceipt ={ + #Err: DIP721APIError; + #Ok: nat; + }; + + type DIP721InterfaceId = + variant { + Approval; + TransactionHistory; + Mint; + Burn; + TransferNotification; + }; + + type DIP721ExtendedMetadataResult = + record { + metadata_desc: DIP721MetadataDesc; + token_id: nat64; + }; + type DIP721MetadataResult = + variant { + Err: DIP721ApiError; + Ok: DIP721MetadataDesc; + }; + + type DIP721MetadataDesc = vec DIP721MetadataPart; + + type MetadataPart = + record { + purpose: DIP721MetadataPurpose; + key_val_data: vec DIP721MetadataKeyVal; + data: blob; + }; + + type DIP721MetadataPurpose = + variant { + Preview; // used as a preview, can be used as preivew in a wallet + Rendered; // used as a detailed version of the NFT + }; + + type DIP721MetadataKeyVal = + record { + text; + DIP721MetadataVal; + }; + + type DIP721MetadataVal = + variant { + TextContent : Text; + BlobContent : blob; + NatContent : Nat; + Nat8Content: Nat8; + Nat16Content: Nat16; + Nat32Content: Nat32; + Nat64Content: Nat64; + }; + + + // EXT Types + + type EXTBalanceRequest = { + user : EXTUser; + token: EXTTokenIdentifier; + }; + + // A user can be any principal or canister, which can hold a balance + type EXTUser = { + #address : EXTAccountIdentifier; //No notification + #principal : Principal; //defaults to sub account 0 + }; + + type EXTAccountIdentifier = Text; + type SubAccount = [Nat8]; + + type CommonError = { + #InvalidToken: Text; + #Other : Text; + }; + + type EXTTransferRequest = { + from : EXTUser; + to : EXTUser; + token : Text; + amount : Nat; + memo : Blob; + notify : Bool; + subaccount : ?[Nat8]; + }; + type EXTTransferResponse = Result; 
+ + type EXTMetadata = { + #fungible : { + name : Text; + symbol : Text; + decimals : Nat8; + metadata : ?Blob; + }; + #nonfungible : { + metadata : ?Blob; + }; + }; + +``` + + +### Http NFT Information + +exos.host/_/canister_id/_/token_id - Returns the primary asset + +exos.host/_/canister_id/_/token_id/preview - Returns the preview asset + +exos.host/_/canister_id/_/token_id/ex - Origyn NFTs are self contained internet addressable objects. All the data for rendering is contained inside the NFT(Authors can choose to host data on other platforms). Returns an HTML interface that displays the NFT according to the NFT authors specification. + +exos.host/_/canister_id/_/token_id/_/library_id - Returns the asset in the library + +exos.host/_/canister_id/_/token_id/_/library_id/info - Returns a json representation of assets in the library + +exos.host/_/canister_id/_/token_id/info - Returns a json representation of the metadata, including the library items + +exos.host/_/canister_id/_/token_id/info?query=[Query] - Returns a json representation of the metadata passed through a query + +### Collection Information + +exos.host/_/canister_id/collection - Returns a json representation of collection information + + + + + diff --git a/docs/nft_sale.md b/docs/nft_sale.md new file mode 100644 index 0000000..da2af61 --- /dev/null +++ b/docs/nft_sale.md @@ -0,0 +1,416 @@ +# NFT Sale Canister + +🚀 Feature-rich NFT sales canister that empowers NFT creators and consumers. This particular smart contract enables NFT authors to take control and manage its digital assets in a very innovative way. + +#### Creator features : + +- Sell NFTs for different amounts and to different sets of people + +- Create different groups and collections + +- Allow free mint for certain groups + +- Create a team group + +- Auto mint + +- Raffle NFTs + +- Ranking groups + +#### User features : + +- Claim NFT with membership rights + +- Use Plug id to reserve number of items in a collection + +- Submit ICP to get whitelisted and be reimbursed if I don’t get an allocation + +- Register and review my escrow + +- See the groups I’m registered for the drop + +--- + +#### Data workflow + +- Manage NFTs - calls ==> Add Inventory Item ( Adds NFTs to inventory ) +- Manage groups ( Allows the creator to create and manage groups. 
These groups can be allocated a certain number of NFTs ) +- Manage Reservation ( Allows a creator to associate a set of nfts with a particular group or address ) +- Get Groups ( Retrieves a list of groups for a particular user or address ) + +**To deploy ONLY the origyn_sale_reference run the following from the root directory :** + +``` + yes yes | bash nft_sales_runner.sh +``` + +**Candid Interface (make sure you change the ids respectively)** + +``` +http://localhost:8000/?canisterId=ryjl3-tyaaa-aaaaa-aaaba-cai&id=rrkah-fqaaa-aaaaa-aaaaq-cai +``` + +**Canister init arguments** + +``` +public type InitArgs = { + owner: Principal; //owner of the canister + allocation_expiration: Int; //amount of time to keep an allocation for 900000000000 = 15 minutes + nft_gateway: ?Principal; //the nft gateway canister this sales canister will sell NFTs for + sale_open_date : ?Int; //date that the NFTs in the registration shold be minted/allocated + registration_date: ?Int; //date that registations open up + end_date: ?Int; //date that the canister closes its sale + required_lock_date: ?Int //date that users must lock their tokens until to qualify for reservations +}; + +``` + +**Call get_groups** + +``` + dfx canister call origyn_sale_reference get_groups +``` + +**Call redeem_allocation** + +``` + dfx canister call origyn_sale_reference redeem_allocation '(record {escrow_receipt=record {token=variant {ic=record {fee=200000; decimals=8; canister=principal "dw5hj-fcc4h-22h5p-zdkx2-3byeo-f2vf3-jv5sa-gckmc-mtnss-zojch-oqe"; standard=variant {DIP20}; symbol="DIP20"}}; token_id="OG1"; seller=variant {"principal" = principal "u74sm-wx4yh-capur-xnz4w-orbcn-l3jlc-m65rb-ue5ah-mqyvz-fmvvc-tae"}; buyer=variant {"principal" = principal "3j2qa-oveg3-2agc5-735se-zsxjj-4n65k-qmnse-byzkf-4xhw5-mzjxe-pae"}; amount=100000000}})' +``` + +**Call register_escrow** + +``` + dfx canister call origyn_sale_reference redeem_allocation '(record {escrow_receipt=record {token=variant {ic=record {fee=200000; decimals=8; canister=principal "dw5hj-fcc4h-22h5p-zdkx2-3byeo-f2vf3-jv5sa-gckmc-mtnss-zojch-oqe"; standard=variant {DIP20}; symbol="DIP20"}}; token_id="OG1"; seller=variant {"principal" = principal "u74sm-wx4yh-capur-xnz4w-orbcn-l3jlc-m65rb-ue5ah-mqyvz-fmvvc-tae"}; buyer=variant {"principal" = principal "3j2qa-oveg3-2agc5-735se-zsxjj-4n65k-qmnse-byzkf-4xhw5-mzjxe-pae"}; amount=100000000}})' +``` + +**Call add_inventory_item** + +``` +dfx canister call origyn_sale_reference add_inventory_item '(record { key = "first"; item = record { canister = principal "dw5hj-fcc4h-22h5p-zdkx2-3byeo-f2vf3-jv5sa-gckmc-mtnss-zojch-oqe"; token_id = "1"; available = true; sale = opt 100; } })' +``` + +**Call manage_nfts (add)** + +``` + + dfx canister call origyn_sale_reference manage_nfts '( vec { variant { add = record { canister = principal "dw5hj-fcc4h-22h5p-zdkx2-3byeo-f2vf3-jv5sa-gckmc-mtnss-zojch-oqe"; token_id = "1"; available = true; sale_block = opt 100; } } ; variant { add = record { canister = principal "dw5hj-fcc4h-22h5p-zdkx2-3byeo-f2vf3-jv5sa-gckmc-mtnss-zojch-oqe"; token_id = "2"; available = true; sale_block = opt 100; } } ; variant { add = record { canister = principal "dw5hj-fcc4h-22h5p-zdkx2-3byeo-f2vf3-jv5sa-gckmc-mtnss-zojch-oqe"; token_id = "3"; available = true; sale_block = opt 100; } } ; variant { add = record { canister = principal "dw5hj-fcc4h-22h5p-zdkx2-3byeo-f2vf3-jv5sa-gckmc-mtnss-zojch-oqe"; token_id = "4"; available = true; sale_block = opt 100; } } ; variant { add = record { canister = principal 
"dw5hj-fcc4h-22h5p-zdkx2-3byeo-f2vf3-jv5sa-gckmc-mtnss-zojch-oqe"; token_id = "5"; available = true; sale_block = opt 100; } } ; variant { add = record { canister = principal "dw5hj-fcc4h-22h5p-zdkx2-3byeo-f2vf3-jv5sa-gckmc-mtnss-zojch-oqe"; token_id = "6"; available = true; sale_block = opt 100; } } ; variant { add = record { canister = principal "dw5hj-fcc4h-22h5p-zdkx2-3byeo-f2vf3-jv5sa-gckmc-mtnss-zojch-oqe"; token_id = "7"; available = true; sale_block = opt 100; } } ; } )' + + dfx canister call origyn_sale_reference manage_nfts '( variant { add = record {key = "second"; item = record { canister = principal "dw5hj-fcc4h-22h5p-zdkx2-3byeo-f2vf3-jv5sa-gckmc-mtnss-zojch-oqe"; token_id = "2"; available = true; sale = opt 100; } ;} })' + + dfx canister call origyn_sale_reference manage_nfts '( variant { add = record {key = "third"; item = record { canister = principal "dw5hj-fcc4h-22h5p-zdkx2-3byeo-f2vf3-jv5sa-gckmc-mtnss-zojch-oqe"; token_id = "3"; available = true; sale = opt 100; } ;} })' + + dfx canister call origyn_sale_reference manage_nfts '( variant { add = record {key = "fourth"; item = record { canister = principal "dw5hj-fcc4h-22h5p-zdkx2-3byeo-f2vf3-jv5sa-gckmc-mtnss-zojch-oqe"; token_id = "4"; available = true; sale = opt 100; } ;} })' + + #remove + dfx canister call origyn_sale_reference manage_nfts '( vec { variant { remove = "1"}; variant {remove = "2" }; variant { remove = "3"}; variant { remove = "4"}; variant { remove = "5"}; } )' +``` + +**Call manage_nfts (remove)** + +``` + dfx canister call origyn_sale_reference manage_nfts '(variant { remove = record { key = "first" ;} })' +``` + +**Call manage_group** + +``` +#add + dfx canister call origyn_sale_reference manage_group '(variant { add = record { allowed_amount = opt 10; members = vec {principal "xr75m-zryhp-v2f4r-kzhsj-62bf2-azbg7-fwrt6-zcdgv-zabu3-qylvn-5qe"}; namespace = "alpha"; pricing=opt vec {record {cost_per=record {token=variant {ic=record {fee=100000; decimals=8; canister=principal "dw5hj-fcc4h-22h5p-zdkx2-3byeo-f2vf3-jv5sa-gckmc-mtnss-zojch-oqe"; standard=variant {DIP20}; symbol="DIP20"}}; amount=1000000}}};} })' + + +#remove + dfx canister call origyn_sale_reference manage_group '(variant { remove = record { namespace = "alpha"; } })' + dfx canister call origyn_sale_reference manage_group '(variant { remove = record { namespace = "beta"; } })' + + #addMembers + dfx canister call origyn_sale_reference manage_group '(variant { addMembers = record { namespace = "alpha"; members = vec {principal "xr75m-zryhp-v2f4r-kzhsj-62bf2-azbg7-fwrt6-zcdgv-zabu3-qylvn-5qe" ; principal "u74sm-wx4yh-capur-xnz4w-orbcn-l3jlc-m65rb-ue5ah-mqyvz-fmvvc-tae"}; } })' + + dfx canister call origyn_sale_reference manage_group '(variant { addMembers = record { namespace = "alpha"; members = vec {principal "xr75m-zryhp-v2f4r-kzhsj-62bf2-azbg7-fwrt6-zcdgv-zabu3-qylvn-5qe" ; principal "u74sm-wx4yh-capur-xnz4w-orbcn-l3jlc-m65rb-ue5ah-mqyvz-fmvvc-tae" ; principal "3j2qa-oveg3-2agc5-735se-zsxjj-4n65k-qmnse-byzkf-4xhw5-mzjxe-pae"}; } })' + +dfx canister call origyn_sale_reference manage_group '(variant { addMembers = record { namespace = "alpha"; members = vec {principal "xr75m-zryhp-v2f4r-kzhsj-62bf2-azbg7-fwrt6-zcdgv-zabu3-qylvn-5qe" ; principal "u74sm-wx4yh-capur-xnz4w-orbcn-l3jlc-m65rb-ue5ah-mqyvz-fmvvc-tae" ; principal "3j2qa-oveg3-2agc5-735se-zsxjj-4n65k-qmnse-byzkf-4xhw5-mzjxe-pae"; principal "g26iu-e3i6k-ysc3e-6rdwn-lztzb-3uazv-ui6os-o7eqz-touw2-42tsd-lae"}; } })' + + + dfx canister call origyn_sale_reference manage_group '(variant { 
addMembers = record { namespace = "beta"; members = vec {principal "xr75m-zryhp-v2f4r-kzhsj-62bf2-azbg7-fwrt6-zcdgv-zabu3-qylvn-5qe" ; principal "u74sm-wx4yh-capur-xnz4w-orbcn-l3jlc-m65rb-ue5ah-mqyvz-fmvvc-tae"}; } })' + + #removeMembers + dfx canister call origyn_sale_reference manage_group '(variant { removeMembers = record { namespace = "alpha"; members = vec {principal "xr75m-zryhp-v2f4r-kzhsj-62bf2-azbg7-fwrt6-zcdgv-zabu3-qylvn-5qe" ; principal "u74sm-wx4yh-capur-xnz4w-orbcn-l3jlc-m65rb-ue5ah-mqyvz-fmvvc-tae"}; } })' + + #updatePricing +dfx canister call origyn_sale_reference manage_group '(variant { updatePricing = record { namespace = "alpha"; pricing=opt vec {record {cost_per=record {token=variant {ic=record {fee=100000; decimals=8; canister=principal "dw5hj-fcc4h-22h5p-zdkx2-3byeo-f2vf3-jv5sa-gckmc-mtnss-zojch-oqe"; standard=variant {DIP20}; symbol="DIP20"}};} })' + +dfx canister call origyn_sale_reference manage_group '(variant {updatePricing=record {pricing=opt vec {record {cost_per=record {token=variant {ic=record {fee=200000; decimals=8; canister=principal "dw5hj-fcc4h-22h5p-zdkx2-3byeo-f2vf3-jv5sa-gckmc-mtnss-zojch-oqe"; standard=variant {DIP20}; symbol="DIP20"}}; amount=1000000}}}; namespace="alpha"}})' + +dfx canister call origyn_sale_reference manage_group '(variant {updatePricing=record {pricing=opt vec {record {cost_per=record {token=variant {ic=record {fee=200000; decimals=8; canister=principal "dw5hj-fcc4h-22h5p-zdkx2-3byeo-f2vf3-jv5sa-gckmc-mtnss-zojch-oqe"; standard=variant {DIP20}; symbol="DIP20"}}; amount=100000}}}; namespace="beta"}})' + + +#updateAllowedAmount +dfx canister call origyn_sale_reference manage_group '(variant { updateAllowedAmount = record { namespace = "alpha"; allowed_amount = opt 15; } })' +``` + +**Call manage_reservation - reservation_type => groups** + +``` +#add + dfx canister call origyn_sale_reference manage_reservation '(variant {add=record {reservation_type=variant {Groups=vec {"uno"; "dos";}}; nfts=vec {"a"; "b"}; namespace="beta"; exclusive=true}})' + + #remove + dfx canister call origyn_sale_reference manage_reservation '(variant {remove=record { namespace="beta"; }})' + + #addNFTs + + // Add duplicates + dfx canister call origyn_sale_reference manage_reservation '(variant {addNFTs = record { nfts=vec {"a"; "a"; "a";}; namespace="beta"; }})' + + // Add different nfts - they may be duplicated but the system will handle it + dfx canister call origyn_sale_reference manage_reservation '(variant {addNFTs = record { nfts=vec {"a"; "b"; "c";}; namespace="beta"; }})' + + #removeNFTs + dfx canister call origyn_sale_reference manage_reservation '(variant {removeNFTs = record { nfts=vec {"a"; "a"; "a";}; namespace="beta"; }})' + + dfx canister call origyn_sale_reference manage_reservation '(variant {removeNFTs = record { nfts=vec {"a"; "b"; "c";}; namespace="beta"; }})' +``` + +**Call manage_reservation - reservation_type => principal** + +``` +#add + dfx canister call origyn_sale_reference manage_reservation '(variant {add=record {reservation_type=variant {Principal=principal "xr75m-zryhp-v2f4r-kzhsj-62bf2-azbg7-fwrt6-zcdgv-zabu3-qylvn-5qe"}; nfts=vec {"1"}; namespace="alpha"; exclusive=true}})' + + #remove + dfx canister call origyn_sale_reference manage_reservation '(variant {remove=record { namespace="alpha"; }})' + + #addNFTs + + // Add duplicates + dfx canister call origyn_sale_reference manage_reservation '(variant {addNFTs = record { nfts=vec {"1"; "1"; "1";}; namespace="alpha"; }})' + + // Add different nfts - they may be duplicated but 
the system will handle it + dfx canister call origyn_sale_reference manage_reservation '(variant {addNFTs = record { nfts=vec {"1"; "2"; "3";}; namespace="alpha"; }})' + + +#removeNFTs + + // Remove duplicates + dfx canister call origyn_sale_reference manage_reservation '(variant {removeNFTs = record { nfts=vec {"1"; "1"; "1";}; namespace="alpha"; }})' + + // Remove different nfts - they may be duplicated but the system will handle it + dfx canister call origyn_sale_reference manage_reservation '(variant {removeNFTs = record { nfts=vec {"1"; "2"; "3";}; namespace="alpha"; }})' + + #update_type + + // Change to principal type + dfx canister call origyn_sale_reference manage_reservation '(variant {update_type = record { reservation_type=variant {Groups=vec {"uno"; "dos";}}; namespace="alpha"; }})' + + // Change to principal type + dfx canister call origyn_sale_reference manage_reservation '(variant {update_type = record { reservation_type=variant {Principal=principal "xr75m-zryhp-v2f4r-kzhsj-62bf2-azbg7-fwrt6-zcdgv-zabu3-qylvn-5qe"}; namespace="alpha"; }})' + + +``` + +**Call get_total_reservations_tree** + +``` + dfx canister call origyn_sale_reference get_total_reservations_tree +``` + +**Call allocate_nfts** + +``` + dfx canister call origyn_sale_reference manage_reservation '(variant {add=record {reservation_type=variant {Principal=principal "xr75m-zryhp-v2f4r-kzhsj-62bf2-azbg7-fwrt6-zcdgv-zabu3-qylvn-5qe"}; nfts=vec {"gamma"}; namespace="alpha"; exclusive=true}})' +``` + +--- + +- [ ] **NFT-172. AA NFT Creator IWT sell my NFTs for a different amount and to different sets of people.** + +- [ ] **NFT-173. AA user IWT claim by an NFT with my membership rights that are granted by holding another NFT(ie mintpass).** + +- [ ] **NFT-174. AA NFT Creator IWT be able to have different groups of allow lists.** + +- [ ] **NFT-177. AAA Buyer IWT use a plug ID to reserve a number of items in a collection STI can have time to pay for them.** + +- [ ] **NFT-178. AA Creator IWT allow a free mint for a tier.** + +- [ ] **NFT-179. AA Creator IWT create a team group so that I can allocate some NFTs to my team.** + +- [ ] **NFT-180. AA Creator IWT supply a list of user principals for a range of my nfts STI can have them auto minted to a team/tier/etc.** + +- [ ] **NFT-181. AA Creator IWT be able to raffle a set of NFTs to a set of users.** + +- [ ] **NFT-182. AA user IWT submit my ICP to get on the list and then get it back if I don't get an allocation.** + +- [ ] **NFT-184. AA creator IWT create a ranking of groups so that they get allocated in order.** + +- [ ] **NFT-185. AA creator IWT indicate a fall through mechanism STI can either have a group completely filled or only allocate n number before falling through to the next tier.** + +- [ ] **NFT-186. AA user IWT to be able to review my escrow registration STI know I am in line for an allocation.** + +- [ ] **NFT-187. AA user IWT be able to see what groups I'm in STI know I'm registered for the drop.** + +- [ ] **NFT-189. AA user IWT register my escrow with a sale STI get an allocation on the drop date.** + +test_runner_sale + +testLoadNFTs + +- Create wallet a & wallet b +- Create newPrincipal from canister factory send owner() and storage(null) +- Create canister from Service (origyn_nft_reference) +- Stage nfts from utils.buildStandardNFT +- Add NFTs sale_canister.manage_nfts_sale_nft_origyn +- See final total inventory or individual + +Questions: + +- You can add minted or unminted NFTs to inventory. Is this correct? 
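+
+The "Add NFTs" and "See final total inventory or individual" steps of testLoadNFTs above can also be exercised by hand against a locally deployed origyn_sale_reference. A minimal sketch, reusing the same calls and the placeholder gateway canister id that appear elsewhere in this document (the principal and token_id are example values only):
+
+```
+# add a single NFT to the sale inventory (canister id and token_id are placeholders)
+dfx canister call origyn_sale_reference manage_nfts_sale_nft_origyn '( vec { variant { add = record { canister = principal "dw5hj-fcc4h-22h5p-zdkx2-3byeo-f2vf3-jv5sa-gckmc-mtnss-zojch-oqe"; token_id = "1"; available = true; sale_block = opt null; } }})'
+
+# read back the full inventory, or a single item by token id
+dfx canister call origyn_sale_reference get_inventory_sale_nft_origyn
+dfx canister call origyn_sale_reference get_inventory_item_sale_nft_origyn '("1")'
+```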
+ +testManagement + +- Create SaleCanister instance +- UpdateNFTGateway +- UpdateAllocationExpiration +- UpdateSaleOpenDate +- UpdateRegistrationDate +- UpdateEndDate +- get_metrics_sale_nft_origyn + +Questions: + +- You can set_canister_sale_open_date_low for a past date like yesterday. Is this correct? +- You can set_canister_registration_date_low for a past date like yesterday + 200 nanos. Is this correct? +- You can set_canister_end_date_low for a past date like yesterday + 300 nanos. Is this correct? + +testAllocation + +add_unminted_1 +defaultGroup +bGroup +cGroup +allocate_0 +allocate_1 +allocate_2 +allocate_3 +allocate_4 +allocate_5 +balance_check_1 +balance_check_2 +balance_check_3 +expiration +expired_check_1 +expired_check_2 +expired_check_3 +allocate\_\_retry_1 + +``` +D.print("testAllocation : " # "\n\n" # +"add_unminted_1 : " # debug_show(add_unminted_1) # "\n\n" # +"defaultGroup : " # debug_show(defaultGroup) # "\n\n" # +"bGroup : " # debug_show(bGroup) # "\n\n" # +"cGroup : " # debug_show(cGroup) # "\n\n" # +"allocate_0 : " # debug_show(allocate_0) # "\n\n" # +"allocate_1 : " # debug_show(allocate_1) # "\n\n" # +"allocate_2 : " # debug_show(allocate_2) # "\n\n" # +"allocate_3 : " # debug_show(allocate_3) # "\n\n" # +"allocate_4 : " # debug_show(allocate_4) # "\n\n" # +"allocate_5 : " # debug_show(allocate_5) # "\n\n" # +"balance_check_1 : " # debug_show(balance_check_1) # "\n\n" # +"balance_check_2 : " # debug_show(balance_check_2) # "\n\n" # +"balance_check_3 : " # debug_show(balance_check_3) # "\n\n" # +"expiration : " # debug_show(expiration) # "\n\n" # +"expired_check_1 : " # debug_show(expired_check_1) # "\n\n" # +"expired_check_2 : " # debug_show(expired_check_2) # "\n\n" # +"expired_check_3 : " # debug_show(expired_check_3) # "\n\n" # +"allocate__retry_1 : " # debug_show(allocate__retry_1) # "\n\n" +); +``` + +testReservation + +``` +D.print("Test reservation sale : " # "\n\n" # +"add_unminted_1 : " # debug_show(add_unminted_1) # "\n\n" # +"defaultGroup : " # debug_show(defaultGroup) # "\n\n" # +"aGroup : " # debug_show(aGroup) # "\n\n" # +"bGroup : " # debug_show(bGroup) # "\n\n" # +"a_principal_request : " # debug_show(a_principal_request) # "\n\n" # +"a_group_request : " # debug_show(a_group_request) # "\n\n" # +"b_group_request : " # debug_show(b_group_request) # "\n\n" # +"aRedeem_payment_2 : " # debug_show(aRedeem_payment_2) # "\n\n" # +"a_wallet_try_escrow_general_valid : " # debug_show(a_wallet_try_escrow_general_valid) # "\n\n" # +"a_wallet_try_register_for_one : " # debug_show(a_wallet_try_register_for_one) # "\n\n" # +"a_wallet_registration_after_one : " # debug_show(a_wallet_registration_after_one) # "\n\n" # +"bRedeem_payment_2 : " # debug_show(bRedeem_payment_2) # "\n\n" # +"b_wallet_try_escrow_general_valid : " # debug_show(b_wallet_try_escrow_general_valid) # "\n\n" # +"b_wallet_try_register_for_four : " # debug_show(b_wallet_try_register_for_four) # "\n\n" # +"b_wallet_registration_after_four : " # debug_show(b_wallet_registration_after_four) # "\n\n" # +"dRedeem_payment_2 : " # debug_show(dRedeem_payment_2) # "\n\n" # +"d_wallet_try_escrow_general_valid : " # debug_show(d_wallet_try_escrow_general_valid) # "\n\n" # +"d_wallet_try_register_for_two : " # debug_show(d_wallet_try_register_for_two) # "\n\n" # +"d_wallet_registration_after_two : " # debug_show(d_wallet_registration_after_two) # "\n\n" # +"d_balance_before_allocation : " # debug_show(d_balance_before_allocation) # "\n\n" # +"d_allocate_empty : " # debug_show(d_allocate_empty) # "\n\n" # 
+"a_allocate_empty : " # debug_show(a_allocate_empty) # "\n\n" # +"a_wallet_try_redeem_for_one : " # debug_show(a_wallet_try_redeem_for_one) # "\n\n" # +"b_allocate_empty : " # debug_show(b_allocate_empty) # "\n\n" # +"b_wallet_try_redeem_for_one : " # debug_show(b_wallet_try_redeem_for_one) # "\n\n" # +"a_allocate_empty_after_two : " # debug_show(a_allocate_empty_after_two) # "\n\n" # +"a_wallet_try_redeem_for_third : " # debug_show(a_wallet_try_redeem_for_third) # "\n\n" # +"c_allocate_empty_after_two : " # debug_show(c_allocate_empty_after_two) # "\n\n" # +"c_wallet_try_redeem_for_one : " # debug_show(c_wallet_try_redeem_for_one) # "\n\n" # +"a_wallet_registration_after_allocation : " # debug_show(a_wallet_registration_after_allocation) # "\n\n" # +"b_wallet_registration_after_allocation : " # debug_show(b_wallet_registration_after_allocation) # "\n\n" # +"c_wallet_registration_after_allocation : " # debug_show(c_wallet_registration_after_allocation) # "\n\n" # +"d_wallet_registration_after_allocation : " # debug_show(d_wallet_registration_after_allocation) # "\n\n" # +"a_wallet_balance_after_three : " # debug_show(a_wallet_balance_after_three) # "\n\n" # +"b_wallet_balance_after_three : " # debug_show(b_wallet_balance_after_three) # "\n\n" # +"c_wallet_balance_after_three : " # debug_show(c_wallet_balance_after_three) # "\n\n" # +"c_allocate_empty_2_end : " # debug_show(c_allocate_empty_2_end) # "\n\n" # +"a_allocate_empty_3_end : " # debug_show(a_allocate_empty_3_end) # "\n\n" # +"d_allocate_empty_3_end : " # debug_show(d_allocate_empty_3_end) # "\n\n" +); +``` + +``` +# METRICS +# Get metrics -sale canister - manage_sale_nft_origyn +dfx canister call origyn_sale_reference get_metrics_sale_nft_origyn + +# UpdateNFTGateway +dfx canister call origyn_sale_reference manage_sale_nft_origyn '( variant {UpdateNFTGateway = opt principal "xr75m-zryhp-v2f4r-kzhsj-62bf2-azbg7-fwrt6-zcdgv-zabu3-qylvn-5qe" })' + +# UpdateAllocationExpiration +dfx canister call origyn_sale_reference manage_sale_nft_origyn '( variant {UpdateAllocationExpiration = 555 })' + +# UpdateSaleOpenDate +dfx canister call origyn_sale_reference manage_sale_nft_origyn '( variant {UpdateSaleOpenDate = opt 1654015582666898000 })' + +# UpdateRegistrationDate +dfx canister call origyn_sale_reference manage_sale_nft_origyn '( variant {UpdateRegistrationDate = opt 1654021827313225000 })' + +# UpdateEndDate +dfx canister call origyn_sale_reference manage_sale_nft_origyn '( variant {UpdateEndDate = opt 1654031827313226000 })' + +``` + +``` +# Manage NFTS - manage_nfts_sale_nft_origyn +#toke_id = "1" +dfx canister call origyn_sale_reference manage_nfts_sale_nft_origyn '( vec { variant { add = record { canister = principal "dw5hj-fcc4h-22h5p-zdkx2-3byeo-f2vf3-jv5sa-gckmc-mtnss-zojch-oqe"; token_id = "1"; available = true; sale_block = opt null; } }})' + +#toke_id = "2" +dfx canister call origyn_sale_reference manage_nfts_sale_nft_origyn '( vec { variant { add = record { canister = principal "dw5hj-fcc4h-22h5p-zdkx2-3byeo-f2vf3-jv5sa-gckmc-mtnss-zojch-oqe"; token_id = "2"; available = true; sale_block = opt null; } }})' + +#token_id = "3", "4", "5" +dfx canister call origyn_sale_reference manage_nfts_sale_nft_origyn '( vec { variant { add = record { canister = principal "xr75m-zryhp-v2f4r-kzhsj-62bf2-azbg7-fwrt6-zcdgv-zabu3-qylvn-5qe"; token_id = "3"; available = true; sale_block = opt null; }}; variant { add = record { canister = principal "xr75m-zryhp-v2f4r-kzhsj-62bf2-azbg7-fwrt6-zcdgv-zabu3-qylvn-5qe"; token_id = "4"; available = 
true; sale_block = opt null; } }; variant { add = record { canister = principal "xr75m-zryhp-v2f4r-kzhsj-62bf2-azbg7-fwrt6-zcdgv-zabu3-qylvn-5qe"; token_id = "5"; available = true; sale_block = opt null; } }})' + + +``` + +``` +# Get full inventory +dfx canister call origyn_sale_reference get_inventory_sale_nft_origyn + +# get inventory item +dfx canister call origyn_sale_reference get_inventory_item_sale_nft_origyn '("1")' +``` diff --git a/docs/release_notes.md b/docs/release_notes.md new file mode 100644 index 0000000..c55b7d4 --- /dev/null +++ b/docs/release_notes.md @@ -0,0 +1,137 @@ + +Definitions + +App Data - ORIGYN NFTs have a simple database inside of them. Apps can store data in a reserved space that can have flexible permissions. The apps can make it so that only they can read the data and/or only they can write the data. They can also grant write permissions to certain other principals via an allow list. Currently, the implementation is more like a structured notepad where you have to write out the entered note each time. Future versions will add granular access to data per app. + +Collection Manager - Collection Managers are granted certain rights over a collection. This allows collection owners to grant third-party apps some rights over their collection. Owners should only grant this privilege to managers that they trust, preferably with applications that are open-sourced and audited. + + Implemented Manager Rights: + + - stage + - mint + - market transfers + - manage sales + + +Collection Owner - A collection can have one owner. An owner has specific and broad rights over a collection except for a few instances(changing immutable data being the most prominent) + +Escrow - All NFT sales require an escrow. The tokens must be deposited in the canister and registered before a bid or peer-to-peer sale can take place + +Experience Asset - An asset that the creator of an NFT can specify as the canonical experience of an nft. Usually an HTML page. + +Gateway Canister - The gateway canister is the "main" canister for your NFT collection. All metadata resides on the gateway canister and thus you are limited to about 2GB of metadata + history at the moment. + +Hidden Asset - An asset that is shown if a user tries to look at an NFT before it is minted. + +Network - Each NFT Collection has a network that it pays network fees to when an NFT is transacted. This network can also make governance changes to the NFT and can change immutable data. Users should only set the network to decentralized DAOs such as the ORIGYN Network as the network has "god mode" over your collection + +Preview Asset - A smaller asset in the NFT that is good for showing in lists. + +Primary Asset - Each NFT can assign a "primary" asset. The NFT expects this to be loaded when viewed from a list of NFTs. + +Storage Canister - A storage canister holds library files. The gateway canister is in charge of distributing those files to the swarm of storage canisters. + +Token-id - Each token in your collection has a unique textbase namespace id. + +Library-id - Each library item in your token's asset library has a unique text-based namespace id. + + + +v0.1.0 + +* Storage - Supports Manually Adding Storage Canisters to a Gateway Canister +* Storage - Gateway Canisters support up to 2GB of Storage +* Upgrades - Implemented Migration Scheme. 
See https://github.com/ZhenyaUsenko/motoko-migrations
+* Logging - Basic logging - Details are not saved
+* Marketplace - Peer to Peer market transactions with escrow
+* Marketplace - Make offers on NFTs with escrow, Owner can reject and refund
+* Marketplace - Sub-account-based deposits and escrow transfer
+* Marketplace - Auctions - Buy It Now
+* Marketplace - Auctions - Reserve Price
+* Marketplace - Auctions - Increase Amount
+* Marketplace - Supports Ledger Style Transactions (ICP, OGY)
+* Marketplace - Deprecated end_sale_nft_origyn - see sale_nft_origyn
+* Marketplace - Deprecated escrow_nft_origyn - see sale_nft_origyn
+* Marketplace - Deprecated withdraw_nft_origyn - see sale_nft_origyn
+* Marketplace - Deprecated bid_nft_origyn - see sale_nft_origyn
+* Marketplace - Manual sale withdrawals
+* Marketplace - Auctions for Minted NFTs
+* Marketplace - Royalty distribution
+* Marketplace - Broker code for peer-to-peer and auctions
+* Marketplace - Royalty split for auctions between the listing broker and bid broker
+* Marketplace - Time-locked escrows for pre-sales
+* Data - Read Type Owner lets data and libraries be restricted to NFT Owners
+* Data - App node data API with allow list access
+* Data - Only initial data nodes can be replaced (i.e. data nodes must be added before mint)
+* Identity - Token Mechanism lets a user get an access token to validate their HTTP requests so that we can show them owner-only data (single canister only)
+* Collection - Retrieve token ids
+* Minting - Metadata Upload
+* Minting - Multi-asset handling
+* Minting - Remote Storage Integration
+* Minting - Free transfer
+* DIP721 - TokenIDs are reversibly converted to a large Nat for compatibility
+* DIP721 - bearer, owner, metadata functionality
+* EXT - TokenIDs are converted to an EXT-style principal id
+* EXT - bearer, owner, metadata functionality
+* Metadata - Report balances for escrow, sales, NFTs, offers
+* Security - Secure queries provided for when consensus is required for query values
+* Logging - Basic logging
+* Media - Streaming callback for large files
+* Media - Video streaming for Safari/iOS via ICxProxy
+* Media - Handle NFT-specific media
+* Media - Handle collection media
+* Media - Handle web-based media with a redirect
+* Dapps - Wallet, Marketplace, Library Viewer, Data Viewer, Ledger Viewer
+
+
+Future
+* Marketplace - Archive sale data
+* Marketplace - Auctions - wait for quiet
+* Marketplace - Auctions - Percentage Increase +* Marketplace - Auctions - Dutch Auction +* Marketplace - Waivers Period for marketable NFTs +* Marketplace - Supports DIP20 Style Token - Pending Sub Account Solution +* Marketplace - Supports EXT Style Tokens - Pending Sub Account Solution +* Marketplace - Supports ICRC1 Style Tokens - Pending Finalization of Standard +* Marketplace - Batch Cycle Break +* Marketplace - Implement Marketable NFTs +* Marketplace - Automated Payouts +* Marketplace - Separate Payouts per Royalty to Subaccount +* Collection - Pagination and Field Selection for collection_nft_origyn +* Data - Granular Data Updates for Apps +* Data - Add Data Dapp Nodes +* Data - Storage Economics +* Data - blocklists(maybe...Sybil may make this useless) +* Data - role-based security(collection_owner, nft_owner,nft_of_collection_owner, former_nft_owner, former_nft_of_collection_owner) +* Storage - Automatic storage canister distribution and creation +* Storage - Erase an existing library item if mutable +* Storage - Immutable Library Items +* Storage - Collection Library Validation +* Storage - Permissioned Libraries +* Minting - Stage Batch Cycle Break +* Minting - Stage Library Batch Cycle Break +* Minting - Async workflow for notifying multiple storage canisters of metadata updates +* DIP721 - Implement Market-Based Transfer +* Marketable - Implement Marketable Rewards +* Marketable - Implement Waivers +* Marketable - Implement Required Actions +* Marketable - Auctions for Unminted NFTS +* Metadata - Report stake in an NFT +* Metadata - Hide unminted NFTs from balance calls not made by the manager/network/owner +* Metadata - Index balance functions for faster responses +* Metadata - Hide unminted items from bearer for non-manager/network/owner +* Ledger - Indexes across a canister +* Ledger - Search across the canister +* Ledger - Data Updates in Ledger +* Ledger - Archive and Query Blocks +* Logging - Canister Geek integration +* HTTP - handling new location types +* Dapps - Default Dapp Routes +* Dapps - Playground Dapp +* Dapps - Library writer +* Dapps - Data Writer +* Backup - Gateway and Storage Canister Backup schemas +* Spam Protection - Inspect message safeguards +* Identity - Multi canister token access mechanism + + diff --git a/docs/sample_calls.md b/docs/sample_calls.md new file mode 100644 index 0000000..6209720 --- /dev/null +++ b/docs/sample_calls.md @@ -0,0 +1,57 @@ + +Starting an auction. 
Be sure to update the end date + +``` +dfx canister call origyn_nft_reference market_transfer_nft_origyn '( record { + token_id="bayc-0"; + sales_config = record { + escrow_receipt = null; + pricing = variant { + auction = record{ + reserve = null; + token = variant { + ic = record{ + canister = principal "ryjl3-tyaaa-aaaaa-aaaba-cai"; + standard = variant {Ledger =null}; + decimals = 8:nat; + symbol = "ICP"; + fee = 10000; + } + }; + buy_now= null; + start_price = 100_000:nat; + start_date = 0; + ending = variant{date = 1650414333000000000:int}; + min_increase = variant{amount = 100_000:nat}; + } + } + } +})' +``` + +Checking balance + +dfx canister call origyn_nft_reference balance_of_nft_origyn '(variant {"principal" = principal "6i6da-t3dfv-vteyg-v5agl-tpgrm-63p4y-t5nmm-gi7nl-o72zu-jd3sc-7qe"})' + + +Checking the current sale of an NFT: + +dfx canister call origyn_nft_reference nft_origyn '("bayc-0")' + +getting the history of an nft: + +dfx canister call origyn_nft_reference history_nft_origyn '("bayc-0",null,null)' + + +end an auction: + +dfx canister call origyn_nft_reference end_sale_nft_origyn '("bayc-0")' + + +Set the canister to test mode + +dfx canister call origyn_nft_reference __set_time_mode '(variant{test=null})' + +Advance time (so you can end an auction); + +dfx canister call origyn_nft_reference __advance_time '(1650414333000001:int)' diff --git a/package-set.dhall b/package-set.dhall new file mode 100644 index 0000000..05a406d --- /dev/null +++ b/package-set.dhall @@ -0,0 +1,109 @@ +let aviate_labs = https://github.com/aviate-labs/package-set/releases/download/v0.1.3/package-set.dhall sha256:ca68dad1e4a68319d44c587f505176963615d533b8ac98bdb534f37d1d6a5b47 + +let upstream = https://github.com/dfinity/vessel-package-set/releases/download/mo-0.6.21-20220215/package-set.dhall sha256:b46f30e811fe5085741be01e126629c2a55d4c3d6ebf49408fb3b4a98e37589b + +let Package = + { name : Text, version : Text, repo : Text, dependencies : List Text } + + +let additions = + [ + { name = "candy_0_1_10" + , repo = "https://github.com/aramakme/candy_library.git" + , version = "v0.1.10" + , dependencies = ["base"] + }, + { + name="principalmo", + repo = "https://github.com/aviate-labs/principal.mo.git", + version = "v0.2.5", + dependencies = ["base"] + }, + { name = "crypto" + , repo = "https://github.com/aviate-labs/crypto.mo" + , version = "v0.2.0" + , dependencies = [ "base", "encoding" ] + }, + { name = "encoding" + , repo = "https://github.com/aviate-labs/encoding.mo" + , version = "v0.3.2" + , dependencies = [ "array", "base" ] + }, + { name = "array" + , repo = "https://github.com/aviate-labs/array.mo" + , version = "v0.2.0" + , dependencies = [ "base" ] + }, + { name = "hash" + , repo = "https://github.com/aviate-labs/hash.mo" + , version = "v0.1.0" + , dependencies = [ "array", "base" ] + }, + { + name = "ext", + repo = "https://github.com/skilesare/extendable-token", + version = "v0.1.0", + dependencies = ["ext"] +}, +{ + name = "httpparser", + repo = "https://github.com/skilesare/http-parser.mo", + version = "v0.1.0", + dependencies = ["base"] +}, +{ name = "http" + , repo = "https://github.com/aviate-labs/http.mo" + , version = "v0.1.0" + , dependencies = [ "base" ] + }, + { name = "format" + , repo = "https://github.com/skilesare/format.mo" + , version = "v0.1.0" + , dependencies = [ "base" ] + }, + { name = "json" + , repo = "https://github.com/aviate-labs/json.mo" + , version = "v0.1.0" + , dependencies = [ "base", "parser-combinators" ] + }, + { name = "stablerbtree_0_6_1" + , 
repo = "https://github.com/skilesare/StableRBTree" + , version = "v0.6.1" + , dependencies = [ "base"] + }, + { name = "stablebuffer_0_2_0" + , repo = "https://github.com/skilesare/StableBuffer" + , version = "v0.2.0" + , dependencies = [ "base"] + }, + { name = "stablebuffer" + , repo = "https://github.com/skilesare/StableBuffer" + , version = "v0.2.0" + , dependencies = [ "base"] + }, + { name = "map_6_0_0" + , repo = "https://github.com/ZhenyaUsenko/motoko-hash-map" + , version = "v6.0.0" + , dependencies = [ "base"] + }, + { name = "map" + , repo = "https://github.com/ZhenyaUsenko/motoko-hash-map" + , version = "v6.0.0" + , dependencies = [ "base"] + }] : List Package +let + {- This is where you can override existing packages in the package-set + + For example, if you wanted to use version `v2.0.0` of the foo library: + let overrides = [ + { name = "foo" + , version = "v2.0.0" + , repo = "https://github.com/bar/foo" + , dependencies = [] : List Text + } + ] + -} + overrides = + [] : List Package + +in aviate_labs # upstream # additions # overrides diff --git a/package.json b/package.json new file mode 100644 index 0000000..5a46358 --- /dev/null +++ b/package.json @@ -0,0 +1,36 @@ +{ + "name": "origyn_nft_reference", + "version": "0.1.0", + "description": "Origyn NFT Reference", + "license": "APACHE 2.0", + "keywords": [ + "Internet Computer", + "Motoko", + "Canister", + "NodeJS" + ], + "scripts": { + + }, + "type": "module", + "devDependencies": { + "@dfinity/agent": "^0.12.1", + "@dfinity/identity": "^0.12.1", + "@dfinity/principal": "^0.12.1", + "@peculiar/webcrypto": "^1.4.0", + "@types/node-fetch": "^2.6.2", + "bip39": "^3.0.4", + "fs-extra": "^10.1.0", + "hdkey": "^2.0.1", + "mime-types": "^2.1.35", + "minimist": "^1.2.6", + "node-fetch": "^3.2.9", + "watch": "^1.0.2" + }, + "prettier": { + "trailingComma": "es5", + "tabWidth": 4, + "semi": true, + "singleQuote": true + } +} diff --git a/principal_to_accountid.py b/principal_to_accountid.py new file mode 100644 index 0000000..acb50eb --- /dev/null +++ b/principal_to_accountid.py @@ -0,0 +1,37 @@ +import hashlib +import base64 +import math +import binascii +import sys + +"""Run `python3 principal_to_accountid.py {Principal}` + """ +if __name__ == '__main__': + # principal_id_str_in = "m7b5y-itxyr-mr2gt-kvadr-2dity-bh3n5-ff7bb-vvm2v-3ftew-5wjtg-2qe" + principal_id_str_in = sys.argv[1] + # print("converting {}".format(principal_id_str_in)) + subaccount = bytearray(32) + principal_id_str = principal_id_str_in.replace('-', '') + pad_length = math.ceil(len(principal_id_str) / 8) * \ + 8 - len(principal_id_str) + # print(principal_id_str) + principal_bytes = base64.b32decode( + principal_id_str.encode('ascii') + b'=' * pad_length, True, None) + principal_bytes = principal_bytes[4:] # remove CRC32 checksum bytes + ADS = b"\x0Aaccount-id" + h = hashlib.sha224() + h.update(ADS) + h.update(principal_bytes) + # print(subaccount) + h.update(subaccount) + + checksum = binascii.crc32(h.digest()) + checksum_bytes = checksum.to_bytes(4, byteorder='big') + + identifier = checksum_bytes + h.digest() + + # print(identifier) + + # print('account identifier {} of principal {}'.format( + # identifier.hex(), principal_id_str_in)) + print(identifier.hex()) \ No newline at end of file diff --git a/projects/bm/0.png b/projects/bm/0.png new file mode 100644 index 0000000..aa62b08 Binary files /dev/null and b/projects/bm/0.png differ diff --git a/projects/bm/1.png b/projects/bm/1.png new file mode 100644 index 0000000..5931b21 Binary files /dev/null and 
b/projects/bm/1.png differ diff --git a/projects/bm/10.png b/projects/bm/10.png new file mode 100644 index 0000000..eea30c6 Binary files /dev/null and b/projects/bm/10.png differ diff --git a/projects/bm/11.png b/projects/bm/11.png new file mode 100644 index 0000000..24f64e3 Binary files /dev/null and b/projects/bm/11.png differ diff --git a/projects/bm/12.png b/projects/bm/12.png new file mode 100644 index 0000000..fb4d374 Binary files /dev/null and b/projects/bm/12.png differ diff --git a/projects/bm/13.png b/projects/bm/13.png new file mode 100644 index 0000000..5629698 Binary files /dev/null and b/projects/bm/13.png differ diff --git a/projects/bm/14.png b/projects/bm/14.png new file mode 100644 index 0000000..06db48e Binary files /dev/null and b/projects/bm/14.png differ diff --git a/projects/bm/15.png b/projects/bm/15.png new file mode 100644 index 0000000..9b4002d Binary files /dev/null and b/projects/bm/15.png differ diff --git a/projects/bm/16.png b/projects/bm/16.png new file mode 100644 index 0000000..6a7c714 Binary files /dev/null and b/projects/bm/16.png differ diff --git a/projects/bm/17.png b/projects/bm/17.png new file mode 100644 index 0000000..cb477b5 Binary files /dev/null and b/projects/bm/17.png differ diff --git a/projects/bm/18.png b/projects/bm/18.png new file mode 100644 index 0000000..e489dba Binary files /dev/null and b/projects/bm/18.png differ diff --git a/projects/bm/19.png b/projects/bm/19.png new file mode 100644 index 0000000..3a00a35 Binary files /dev/null and b/projects/bm/19.png differ diff --git a/projects/bm/2.png b/projects/bm/2.png new file mode 100644 index 0000000..3747225 Binary files /dev/null and b/projects/bm/2.png differ diff --git a/projects/bm/3.png b/projects/bm/3.png new file mode 100644 index 0000000..39a5fc2 Binary files /dev/null and b/projects/bm/3.png differ diff --git a/projects/bm/4.png b/projects/bm/4.png new file mode 100644 index 0000000..d2cb11a Binary files /dev/null and b/projects/bm/4.png differ diff --git a/projects/bm/5.png b/projects/bm/5.png new file mode 100644 index 0000000..9090dd2 Binary files /dev/null and b/projects/bm/5.png differ diff --git a/projects/bm/6.png b/projects/bm/6.png new file mode 100644 index 0000000..116cfc2 Binary files /dev/null and b/projects/bm/6.png differ diff --git a/projects/bm/7.png b/projects/bm/7.png new file mode 100644 index 0000000..171e3f2 Binary files /dev/null and b/projects/bm/7.png differ diff --git a/projects/bm/8.png b/projects/bm/8.png new file mode 100644 index 0000000..e55524f Binary files /dev/null and b/projects/bm/8.png differ diff --git a/projects/bm/9.png b/projects/bm/9.png new file mode 100644 index 0000000..13d5c4a Binary files /dev/null and b/projects/bm/9.png differ diff --git a/projects/bm/build-dapps.sh b/projects/bm/build-dapps.sh new file mode 100644 index 0000000..556b1ae --- /dev/null +++ b/projects/bm/build-dapps.sh @@ -0,0 +1,37 @@ +if [ -f "gittoken.key" ] +then + token=$(head -n 1 gittoken.key) +else + echo "Enter your github personal access token (will be used to fetch relaese of dApps):" + read token + echo $token >> "gittoken.key" +fi + +owner="ORIGYN-SA" +repo="DApps" +tag="dapps-latest-build" +name="dist.zip" +downloaded_file_name="$tag.zip" +GH_API="https://api.github.com" +GH_REPO="$GH_API/repos/$owner/$repo" +GH_TAGS="$GH_REPO/releases/tags/$tag" +AUTH="Authorization: token $token" +WGET_ARGS="--content-disposition --auth-no-challenge --no-cookie" +CURL_ARGS="-LS" + +curl -o /dev/null -sH "$AUTH" $GH_REPO || { echo "Error: Invalid repo, token or 
network issue!"; exit 1; } + +response=$(curl -sH "$AUTH" $GH_TAGS) + +eval $(echo "$response" | grep -C3 "name.:.\+$name" | grep -w id | tr : = | tr -cd '[[:alnum:]]=') +[ "$id" ] || { echo "Error: Failed to get asset id, response: $response" | awk 'length($0)<100' >&2; exit 1; } +GH_ASSET="$GH_REPO/releases/assets/$id" +echo $GH_ASSET + +echo "Downloading asset..." >&2 +curl $CURL_ARGS -H "Authorization: token $token" -H 'Accept: application/octet-stream' "$GH_ASSET" -o "$downloaded_file_name" +echo "$0 done." >&2 + +rm -rf $tag +unzip "$downloaded_file_name" -d "$tag" +python3 ./projects/bm/update_dapps_in_collection.py \ No newline at end of file diff --git a/projects/bm/def.html b/projects/bm/def.html new file mode 100644 index 0000000..33b6e9b --- /dev/null +++ b/projects/bm/def.html @@ -0,0 +1,287 @@ + + + + Brain Matter - XXXXXX + + + +
+<!-- def.html body: page header "Brain Matter XXXXXX" and a "Brain Matter Collection" gallery
+     of thumbnails bm 2 through bm 9 (surrounding markup omitted). -->
+ + diff --git a/projects/bm/def.json b/projects/bm/def.json new file mode 100644 index 0000000..386753f --- /dev/null +++ b/projects/bm/def.json @@ -0,0 +1,175 @@ +{ + "meta" : { + "metadata" : { + "Class": [ + {"name":"id", "value":{"Text":"bm-XXXXXX"}, "immutable":true}, + {"name":"primary_asset", "value":{"Text":"com.bm.brain.XXXXXX.primary"}, "immutable":true}, + {"name":"preview_asset", "value":{"Text":"com.bm.brain.XXXXXX.primary"}, "immutable":true}, + {"name":"experience_asset", "value":{"Text":"com.bm.brain.XXXXXX.html"}, "immutable":true}, + {"name":"hidden_asset", "value":{"Text":"com.bm.brain.hidden"}, "immutable":true}, + {"name":"library", "value":{"Array":{ + "thawed": [ + {"Class":[ + {"name":"library_id", "value":{"Text":"com.bm.brain.XXXXXX.primary"}, "immutable":true}, + {"name":"title", "value":{"Text":"bm - brain XXXXXX"}, "immutable":true}, + {"name":"location_type", "value":{"Text":"canister"}, "immutable":true}, + {"name":"location", "value":{"Text":"https://CANISTER-ID.raw.ic0.app/-/bm-XXXXXX/-/com.bm.brain.XXXXXX.primary"}, "immutable":true}, + {"name":"content_type", "value":{"Text":"image/png"}, "immutable":true}, + {"name":"content_hash", "value":{"Text":"1"}, "immutable":true}, + {"name":"size", "value":{"Nat":500000}, "immutable":true}, + {"name":"sort", "value":{"Text":"1"}, "immutable":true}, + {"name":"read","value":{"Text":"public"}, "immutable":false} + ]}, + + {"Class":[ + {"name":"library_id", "value":{"Text":"com.bm.brain.hidden"}, "immutable":true}, + {"name":"title", "value":{"Text":"Random-brain"}, "immutable":true}, + {"name":"location_type", "value":{"Text":"collection"}, "immutable":true}, + {"name":"location", "value":{"Text":"https://CANISTER-ID.raw.ic0.app/-/bm-XXXXXX/-/com.bm.brain.XXXXXX.hidden2"}, "immutable":true}, + {"name":"content_type", "value":{"Text":"image/gif"}, "immutable":true}, + {"name":"content_hash", "value":{"Text":"1"}, "immutable":true}, + {"name":"size", "value":{"Nat":9990000}, "immutable":true}, + {"name":"sort", "value":{"Text":"1"}, "immutable":true}, + {"name":"read","value":{"Text":"public"}, "immutable":false} + ]}, + {"Class":[ + {"name":"library_id", "value":{"Text":"com.bm.brain.XXXXXX.html"}, "immutable":true}, + {"name":"title", "value":{"Text":"bm Sample"}, "immutable":true}, + {"name":"location_type", "value":{"Text":"canister"}, "immutable":true}, + {"name":"location", "value":{"Text":"https://CANISTER-ID.raw.ic0.app/-/bm-XXXXXX/-/com.bm.brain.XXXXXX.html"}, "immutable":true}, + {"name":"content_type", "value":{"Text":"text/html"}, "immutable":true}, + {"name":"content_hash", "value":{"Text":"1"}, "immutable":true}, + {"name":"size", "value":{"Nat":500000}, "immutable":true}, + {"name":"sort", "value":{"Text":"1"}, "immutable":true}, + {"name":"read","value":{"Text":"public"}, "immutable":false} + ]}, + {"Class":[ + {"name":"library_id", "value":{"Text":"wallet"}, "immutable":true}, + {"name":"title", "value":{"Text":"Wallet dApp"}, "immutable":true}, + {"name":"location_type", "value":{"Text":"collection"}, "immutable":true}, + {"name":"location", "value":{"Text":"https://CANISTER-ID.raw.ic0.app/-/bm-XXXXXX/-/wallet"}, "immutable":true}, + {"name":"content_type", "value":{"Text":"text/html"}, "immutable":true}, + {"name":"content_hash", "value":{"Text":"1"}, "immutable":true}, + {"name":"size", "value":{"Nat":1503756}, "immutable":true}, + {"name":"sort", "value":{"Text":"1"}, "immutable":true}, + {"name":"read","value":{"Text":"public"}, "immutable":false} + ]}, + {"Class":[ + {"name":"library_id", 
"value":{"Text":"ledger"}, "immutable":true}, + {"name":"title", "value":{"Text":"Ledger dApp"}, "immutable":true}, + {"name":"location_type", "value":{"Text":"collection"}, "immutable":true}, + {"name":"location", "value":{"Text":"https://CANISTER-ID.raw.ic0.app/-/bm-XXXXXX/-/ledger"}, "immutable":true}, + {"name":"content_type", "value":{"Text":"text/html"}, "immutable":true}, + {"name":"content_hash", "value":{"Text":"1"}, "immutable":true}, + {"name":"size", "value":{"Nat":974320}, "immutable":true}, + {"name":"sort", "value":{"Text":"1"}, "immutable":true}, + {"name":"read","value":{"Text":"public"}, "immutable":false} + ]}, + {"Class":[ + {"name":"library_id", "value":{"Text":"data"}, "immutable":true}, + {"name":"title", "value":{"Text":"Data dApp"}, "immutable":true}, + {"name":"location_type", "value":{"Text":"collection"}, "immutable":true}, + {"name":"location", "value":{"Text":"https://CANISTER-ID.raw.ic0.app/-/bm-XXXXXX/-/data"}, "immutable":true}, + {"name":"content_type", "value":{"Text":"text/html"}, "immutable":true}, + {"name":"content_hash", "value":{"Text":"1"}, "immutable":true}, + {"name":"size", "value":{"Nat":936231}, "immutable":true}, + {"name":"sort", "value":{"Text":"1"}, "immutable":true}, + {"name":"read","value":{"Text":"public"}, "immutable":false} + ]}, + {"Class":[ + {"name":"library_id", "value":{"Text":"marketplace"}, "immutable":true}, + {"name":"title", "value":{"Text":"Marketplace dApp"}, "immutable":true}, + {"name":"location_type", "value":{"Text":"collection"}, "immutable":true}, + {"name":"location", "value":{"Text":"https://CANISTER-ID.raw.ic0.app/-/bm-XXXXXX/-/marketplace"}, "immutable":true}, + {"name":"content_type", "value":{"Text":"text/html"}, "immutable":true}, + {"name":"content_hash", "value":{"Text":"1"}, "immutable":true}, + {"name":"size", "value":{"Nat":936231}, "immutable":true}, + {"name":"sort", "value":{"Text":"1"}, "immutable":true}, + {"name":"read","value":{"Text":"public"}, "immutable":false} + ]}, + {"Class":[ + {"name":"library_id", "value":{"Text":"dapp_library"}, "immutable":true}, + {"name":"title", "value":{"Text":"Library dApp"}, "immutable":true}, + {"name":"location_type", "value":{"Text":"collection"}, "immutable":true}, + {"name":"location", "value":{"Text":"https://CANISTER-ID.raw.ic0.app/-/bm-XXXXXX/-/dapp_library"}, "immutable":true}, + {"name":"content_type", "value":{"Text":"text/html"}, "immutable":true}, + {"name":"content_hash", "value":{"Text":"1"}, "immutable":true}, + {"name":"size", "value":{"Nat":936231}, "immutable":true}, + {"name":"sort", "value":{"Text":"1"}, "immutable":true}, + {"name":"read","value":{"Text":"public"}, "immutable":false} + ]} + ] + }}, "immutable":false}, + {"name":"__apps", "value":{"Array":{ + "thawed": [ + {"Class":[ + {"name":"app_id", "value":{"Text":"com.bm.sample.app"}, "immutable":true}, + {"name":"read", "value":{"Text":"public"}, "immutable":false}, + {"name":"write", "value":{"Class":[ + {"name":"type", "value":{"Text":"allow"}, "immutable":false}, + {"name":"list", "value":{"Array":{ + "thawed": [ + {"Principal" : "APP-ID"} + ] + }}, "immutable":false} + + ]}, "immutable":false}, + {"name":"permissions", "value":{"Class":[ + {"name":"type", "value":{"Text":"allow"}, "immutable":false}, + {"name":"list", "value":{"Array":{ + "thawed": [ + {"Principal" : "APP-ID"} + ] + }}, "immutable":false} + + ]}, "immutable":false}, + {"name":"data", "value":{"Class":[ + {"name":"com.bm.sample.app.name", "value":{"Text":"brain XXXXXX"}, "immutable":false}, + 
{"name":"com.bm.sample.app.total_in_collection", "value":{"Nat":16}, "immutable":false}, + {"name":"com.bm.sample.app.creator_name", "value":{"Text":"bm"}, "immutable":false}, + {"name":"com.bm.sample.app.creator_principal", "value":{"Principal":"CREATOR-PRINCIPAL-ID"}, "immutable":false}, + {"name":"com..bm.sample.app.private_id", "value":{"Class":[ + {"name":"data", "value":{"Text":"you shouldn't see this"}, "immutable":false}, + {"name":"read", "value":{"Class":[ + {"name":"type", "value":{"Text":"allow"}, "immutable":false}, + {"name":"list", "value":{"Array":{ + "thawed": [ + {"Principal" : "APP-ID"} + ] + }}, "immutable":false} + + ]}, "immutable":false}, + {"name":"write", "value":{"Class":[ + {"name":"type", "value":{"Text":"allow"}, "immutable":false}, + {"name":"list", "value":{"Array":{ + "thawed": [ + {"Principal" : "APP-ID"} + ] + }}, "immutable":false} + + ]}, "immutable":false} + + ]}, "immutable":false} + + + ]}, "immutable":false} + ]} + ] + }}, "immutable":true}, + {"name":"owner", "value":{"Principal":"CANISTER-ID"}, "immutable":false}, + {"name":"is_soulbound", "value":{"Bool":YYYYYY}, "immutable":YYYYYY} + + ] + } + }, + "library" :[ + { + "library_id": "com.bm.brain.XXXXXX.primary", + "library_file": "./projects/bm/XXXXXX.png" + }, + { + "library_id": "com.bm.brain.XXXXXX.html", + "library_file": "./projects/bm/XXXXXX.html" + } + ] +} \ No newline at end of file diff --git a/projects/bm/def_collection.json b/projects/bm/def_collection.json new file mode 100644 index 0000000..1b71100 --- /dev/null +++ b/projects/bm/def_collection.json @@ -0,0 +1,680 @@ +{ + "meta": { + "metadata": { + "Class": [ + { "name": "id", "value": { "Text": "" }, "immutable": true }, + { + "name": "primary_asset", + "value": { "Text": "com.bm.brain.1.primary" }, + "immutable": true + }, + { + "name": "preview_asset", + "value": { "Text": "com.bm.brain.1.primary" }, + "immutable": true + }, + { + "name": "experience_asset", + "value": { "Text": "com.bm.brain.1.html" }, + "immutable": true + }, + { + "name": "hidden_asset", + "value": { "Text": "com.bm.brain.hidden" }, + "immutable": true + }, + { + "name": "library", + "value": { + "Array": { + "thawed": [ + { + "Class": [ + { + "name": "library_id", + "value": { + "Text": "com.bm.brain.hidden" + }, + "immutable": true + }, + { + "name": "title", + "value": { "Text": "Random-brain" }, + "immutable": true + }, + { + "name": "location_type", + "value": { "Text": "canister" }, + "immutable": true + }, + { + "name": "location", + "value": { + "Text": "https://CANISTER-ID.raw.ic0.app/collection/-/com.bm.brain.XXXXXX.hidden" + }, + "immutable": true + }, + { + "name": "content_type", + "value": { "Text": "image/gif" }, + "immutable": true + }, + { + "name": "content_hash", + "value": { "Text": "1" }, + "immutable": true + }, + { + "name": "size", + "value": { "Nat": 9990000 }, + "immutable": true + }, + { + "name": "sort", + "value": { "Text": "1" }, + "immutable": true + }, + { + "name": "read", + "value": { "Text": "public" }, + "immutable": false + } + ] + }, + + { + "Class": [ + { + "name": "library_id", + "value": { "Text": "ledger" }, + "immutable": true + }, + { + "name": "title", + "value": { "Text": "NFT ledger" }, + "immutable": true + }, + { + "name": "location_type", + "value": { "Text": "canister" }, + "immutable": true + }, + { + "name": "location", + "value": { + "Text": "https://CANISTER-ID.raw.ic0.app/collection/-/ledger" + }, + "immutable": true + }, + { + "name": "content_type", + "value": { "Text": "text/html" }, + "immutable": 
true + }, + { + "name": "content_hash", + "value": { + "Text": "dapp_ledger_hash" + }, + "immutable": true + }, + { + "name": "size", + "value": { + "Nat": dapp_ledger_size + }, + "immutable": true + }, + { + "name": "sort", + "value": { "Text": "1" }, + "immutable": true + }, + { + "name": "read", + "value": { "Text": "public" }, + "immutable": false + } + ] + }, + { + "Class": [ + { + "name": "library_id", + "value": { "Text": "wallet" }, + "immutable": true + }, + { + "name": "title", + "value": { "Text": "Wallet" }, + "immutable": true + }, + { + "name": "location_type", + "value": { "Text": "canister" }, + "immutable": true + }, + { + "name": "location", + "value": { + "Text": "https://CANISTER-ID.raw.ic0.app/collection/-/wallet" + }, + "immutable": true + }, + { + "name": "content_type", + "value": { "Text": "text/html" }, + "immutable": true + }, + { + "name": "content_hash", + "value": { + "Text": "dapp_wallet_hash" + }, + "immutable": true + }, + { + "name": "size", + "value": { + "Nat": dapp_wallet_size + }, + "immutable": true + }, + { + "name": "sort", + "value": { "Text": "1" }, + "immutable": true + }, + { + "name": "read", + "value": { "Text": "public" }, + "immutable": false + } + ] + }, + { + "Class": [ + { + "name": "library_id", + "value": { "Text": "dapp_library" }, + "immutable": true + }, + { + "name": "title", + "value": { "Text": "Library dApp" }, + "immutable": true + }, + { + "name": "location_type", + "value": { "Text": "canister" }, + "immutable": true + }, + { + "name": "location", + "value": { + "Text": "https://CANISTER-ID.raw.ic0.app/collection/-/library" + }, + "immutable": true + }, + { + "name": "content_type", + "value": { "Text": "text/html" }, + "immutable": true + }, + { + "name": "content_hash", + "value": { + "Text": "dapp_library_hash" + }, + "immutable": true + }, + { + "name": "size", + "value": { + "Nat": dapp_library_size + }, + "immutable": true + }, + { + "name": "sort", + "value": { "Text": "1" }, + "immutable": true + }, + { + "name": "read", + "value": { "Text": "public" }, + "immutable": false + } + ] + }, + { + "Class": [ + { + "name": "library_id", + "value": { "Text": "data" }, + "immutable": true + }, + { + "name": "title", + "value": { "Text": "Data dApp" }, + "immutable": true + }, + { + "name": "location_type", + "value": { "Text": "canister" }, + "immutable": true + }, + { + "name": "location", + "value": { + "Text": "https://CANISTER-ID.raw.ic0.app/-/bm-01/-/data" + }, + "immutable": true + }, + { + "name": "content_type", + "value": { "Text": "text/html" }, + "immutable": true + }, + { + "name": "content_hash", + "value": { + "Text": "dapp_nftdata_hash" + }, + "immutable": true + }, + { + "name": "size", + "value": { + "Nat": dapp_nftdata_size + }, + "immutable": true + }, + { + "name": "sort", + "value": { "Text": "1" }, + "immutable": true + }, + { + "name": "read", + "value": { "Text": "public" }, + "immutable": false + } + ] + }, + { + "Class": [ + { + "name": "library_id", + "value": { "Text": "marketplace" }, + "immutable": true + }, + { + "name": "title", + "value": { "Text": "Marketplace" }, + "immutable": true + }, + { + "name": "location_type", + "value": { "Text": "canister" }, + "immutable": true + }, + { + "name": "location", + "value": { + "Text": "https://CANISTER-ID.raw.ic0.app/collection/-/marketplace" + }, + "immutable": true + }, + { + "name": "content_type", + "value": { "Text": "text/html" }, + "immutable": true + }, + { + "name": "content_hash", + "value": { + "Text": "dapp_marketplace_hash" + }, + 
"immutable": true + }, + { + "name": "size", + "value": { + "Nat": dapp_marketplace_size + }, + "immutable": true + }, + { + "name": "sort", + "value": { "Text": "1" }, + "immutable": true + }, + { + "name": "read", + "value": { "Text": "public" }, + "immutable": false + } + ] + } + ] + } + }, + "immutable": false + }, + { + "name": "default_royalty_primary", + "value": { + "Array": { + "thawed": [ + { + "Class": [ + { + "name": "tag", + "value": { + "Text": "com.origyn.royalty.broker" + }, + "immutable": true + }, + { + "name": "rate", + "value": { "Float": 0.05 }, + "immutable": true + }, + { + "name": "account", + "value": { + "Principal": "CANISTER-ID" + }, + "immutable": false + } + ] + }, + { + "Class": [ + { + "name": "tag", + "value": { + "Text": "com.origyn.royalty.node" + }, + "immutable": true + }, + { + "name": "rate", + "value": { "Float": 0.005 }, + "immutable": true + }, + { + "name": "account", + "value": { + "Principal": "CANISTER-ID" + }, + "immutable": false + } + ] + } + ] + } + }, + "immutable": false + }, + { + "name": "default_royalty_secondary", + "value": { + "Array": { + "thawed": [ + { + "Class": [ + { + "name": "tag", + "value": { + "Text": "com.origyn.royalty.broker" + }, + "immutable": true + }, + { + "name": "rate", + "value": { "Float": 0.05 }, + "immutable": true + }, + { + "name": "account", + "value": { + "Principal": "CANISTER-ID" + }, + "immutable": false + } + ] + }, + { + "Class": [ + { + "name": "tag", + "value": { + "Text": "com.origyn.royalty.node" + }, + "immutable": true + }, + { + "name": "rate", + "value": { "Float": 0.005 }, + "immutable": true + }, + { + "name": "account", + "value": { + "Principal": "CANISTER-ID" + }, + "immutable": false + } + ] + }, + { + "Class": [ + { + "name": "tag", + "value": { + "Text": "com.origyn.royalty.originator" + }, + "immutable": true + }, + { + "name": "rate", + "value": { "Float": 0.05 }, + "immutable": true + }, + { + "name": "account", + "value": { + "Principal": "CANISTER-ID" + }, + "immutable": false + } + ] + }, + { + "Class": [ + { + "name": "tag", + "value": { + "Text": "com.origyn.royalty.custom" + }, + "immutable": true + }, + { + "name": "rate", + "value": { "Float": 0.05 }, + "immutable": true + }, + { + "name": "account", + "value": { + "Principal": "CANISTER-ID" + }, + "immutable": false + } + ] + } + ] + } + }, + "immutable": false + }, + { + "name": "__apps", + "value": { + "Array": { + "thawed": [ + { + "Class": [ + { + "name": "app_id", + "value": { + "Text": "collectionData" + }, + "immutable": true + }, + { + "name": "read", + "value": { "Text": "public" }, + "immutable": false + }, + { + "name": "write", + "value": { + "Class": [ + { + "name": "type", + "value": { + "Text": "allow" + }, + "immutable": false + }, + { + "name": "list", + "value": { + "Array": { + "thawed": [ + { + "Principal": "APP-ID" + } + ] + } + }, + "immutable": false + } + ] + }, + "immutable": false + }, + { + "name": "permissions", + "value": { + "Class": [ + { + "name": "type", + "value": { + "Text": "allow" + }, + "immutable": false + }, + { + "name": "list", + "value": { + "Array": { + "thawed": [ + { + "Principal": "APP-ID" + } + ] + } + }, + "immutable": false + } + ] + }, + "immutable": false + }, + { + "name": "data", + "value": { + "Class": [ + { + "name": "name", + "value": { + "Text": "Collection Name" + }, + "immutable": false + }, + { + "name": "description", + "value": { + "Text": "NFTs and/or collection description" + }, + "immutable": false + }, + { + "name": "com.bm.sample.app.creator_name", + 
"value": { + "Text": "bm" + }, + "immutable": false + }, + { + "name": "com.bm.sample.app.creator_principal", + "value": { + "Principal": "CREATOR-PRINCIPAL-ID" + }, + "immutable": false + } + ] + }, + "immutable": false + } + ] + } + ] + } + }, + "immutable": true + }, + { + "name": "owner", + "value": { "Principal": "CANISTER-ID" }, + "immutable": false + }, + { + "name": "is_soulbound", + "value": { "Bool": false }, + "immutable": false + } + ] + } + }, + "library": [ + { + "library_id": "com.bm.brain.hidden", + "library_file": "./projects/bm/mystery-bm.gif" + }, + { + "library_id": "ledger", + "library_file": "./dapps-latest-build/dist/ledger.html" + }, + { + "library_id": "data", + "library_file": "./dapps-latest-build/dist/nftData.html" + }, + { + "library_id": "wallet", + "library_file": "./dapps-latest-build/dist/wallet.html" + }, + { + "library_id": "dapp_library", + "library_file": "./dapps-latest-build/dist/library.html" + }, + { + "library_id": "marketplace", + "library_file": "./dapps-latest-build/dist/marketplace.html" + } + ] +} diff --git a/projects/bm/deploybm-dev.sh b/projects/bm/deploybm-dev.sh new file mode 100644 index 0000000..e9dd76e --- /dev/null +++ b/projects/bm/deploybm-dev.sh @@ -0,0 +1,10 @@ +set -ex +env_network='ic' +env_prod='true' +env_name='origyn_nft_reference_dev' +env_name_sale='dev_sales_canister' +export env_network +export env_prod +export env_name +export env_name_sale +bash ./projects/bm/deploybm.sh \ No newline at end of file diff --git a/projects/bm/deploybm-local.sh b/projects/bm/deploybm-local.sh new file mode 100644 index 0000000..1e651d4 --- /dev/null +++ b/projects/bm/deploybm-local.sh @@ -0,0 +1,10 @@ +set -ex +env_network='local' +env_prod='false' +env_name='origyn_nft_reference' +env_name_sale='origyn_sale_reference' +export env_network +export env_prod +export env_name +export env_name_sale +bash ./projects/bm/deploybm.sh \ No newline at end of file diff --git a/projects/bm/deploybm-stage.sh b/projects/bm/deploybm-stage.sh new file mode 100644 index 0000000..baa79ae --- /dev/null +++ b/projects/bm/deploybm-stage.sh @@ -0,0 +1,10 @@ +set -ex +env_network='ic' +env_prod='true' +env_name='origyn_nft_reference_stage' +env_name_sale='origyn_sale_reference_stage' +export env_network +export env_prod +export env_name +export env_name_sale +bash ./projects/bm/deploybm.sh \ No newline at end of file diff --git a/projects/bm/deploybm.sh b/projects/bm/deploybm.sh new file mode 100644 index 0000000..02088fb --- /dev/null +++ b/projects/bm/deploybm.sh @@ -0,0 +1,142 @@ +set -ex + +npm install + +dfx identity import local_nft_deployer --disable-encryption identity.pem || true +dfx identity use local_nft_deployer + +ADMIN_PRINCIPAL=$(dfx identity get-principal) +ADMIN_ACCOUNTID=$(dfx ledger account-id) + +echo $ADMIN_PRINCIPAL +echo $ADMIN_ACCOUNTID + + +dfx identity --network $env_network set-wallet $(dfx identity get-principal) || true + + +dfx canister --network local create origyn_nft_reference || true +dfx canister --network local create origyn_sale_reference || true +dfx canister --network $env_network create $env_name || true +dfx canister --network $env_network create $env_name_sale || true + +NFT_CANISTER_ID=$(dfx canister --network $env_network id $env_name) +NFT_CANISTER_Account=$(python3 principal_to_accountid.py $NFT_CANISTER_ID) + +NFT_Sale_ID=$(dfx canister --network $env_network id $env_name_sale) +NFT_Sale_Account=$(python3 principal_to_accountid.py $NFT_Sale_ID) + +echo $NFT_CANISTER_ID +echo $NFT_CANISTER_Account + +awk 
"{gsub(\"CANISTER-ID\",\"$NFT_CANISTER_ID\"); print}" ./projects/bm/def.json > ./projects/bm/def_loaded_1.json +awk "{gsub(\"APP-ID\",\"$ADMIN_PRINCIPAL\"); print}" ./projects/bm/def_loaded_1.json > ./projects/bm/def_loaded_2.json +awk "{gsub(\"CREATOR-PRINCIPAL-ID\",\"$ADMIN_PRINCIPAL\"); print}" ./projects/bm/def_loaded_2.json > ./projects/bm/def_loaded.json + +bash ./projects/bm/build-dapps.sh + +awk "{gsub(\"CANISTER-ID\",\"$NFT_CANISTER_ID\"); print}" ./projects/bm/def_collection_build.json > ./projects/bm/def_collection_1.json +awk "{gsub(\"APP-ID\",\"$ADMIN_PRINCIPAL\"); print}" ./projects/bm/def_collection_1.json > ./projects/bm/def_collection_2.json +awk "{gsub(\"CREATOR-PRINCIPAL-ID\",\"$ADMIN_PRINCIPAL\"); print}" ./projects/bm/def_collection_2.json > ./projects/bm/def_collection_loaded.json + +awk "{gsub(\"XXXXXX\",\"0\"); print}" ./projects/bm/def_loaded.json > ./projects/bm/def_0a.json +awk "{gsub(\"XXXXXX\",\"1\"); print}" ./projects/bm/def_loaded.json > ./projects/bm/def_1a.json +awk "{gsub(\"XXXXXX\",\"2\"); print}" ./projects/bm/def_loaded.json > ./projects/bm/def_2a.json +awk "{gsub(\"XXXXXX\",\"3\"); print}" ./projects/bm/def_loaded.json > ./projects/bm/def_3a.json +awk "{gsub(\"XXXXXX\",\"4\"); print}" ./projects/bm/def_loaded.json > ./projects/bm/def_4a.json +awk "{gsub(\"XXXXXX\",\"5\"); print}" ./projects/bm/def_loaded.json > ./projects/bm/def_5a.json +awk "{gsub(\"XXXXXX\",\"6\"); print}" ./projects/bm/def_loaded.json > ./projects/bm/def_6a.json +awk "{gsub(\"XXXXXX\",\"7\"); print}" ./projects/bm/def_loaded.json > ./projects/bm/def_7a.json +awk "{gsub(\"XXXXXX\",\"8\"); print}" ./projects/bm/def_loaded.json > ./projects/bm/def_8a.json +awk "{gsub(\"XXXXXX\",\"9\"); print}" ./projects/bm/def_loaded.json > ./projects/bm/def_9a.json +awk "{gsub(\"XXXXXX\",\"10\"); print}" ./projects/bm/def_loaded.json > ./projects/bm/def_10a.json +awk "{gsub(\"XXXXXX\",\"11\"); print}" ./projects/bm/def_loaded.json > ./projects/bm/def_11a.json +awk "{gsub(\"XXXXXX\",\"12\"); print}" ./projects/bm/def_loaded.json > ./projects/bm/def_12a.json +awk "{gsub(\"XXXXXX\",\"13\"); print}" ./projects/bm/def_loaded.json > ./projects/bm/def_13a.json +awk "{gsub(\"XXXXXX\",\"14\"); print}" ./projects/bm/def_loaded.json > ./projects/bm/def_14a.json +awk "{gsub(\"XXXXXX\",\"15\"); print}" ./projects/bm/def_loaded.json > ./projects/bm/def_15a.json +awk "{gsub(\"XXXXXX\",\"16\"); print}" ./projects/bm/def_loaded.json > ./projects/bm/def_16a.json +awk "{gsub(\"XXXXXX\",\"17\"); print}" ./projects/bm/def_loaded.json > ./projects/bm/def_17a.json +awk "{gsub(\"XXXXXX\",\"18\"); print}" ./projects/bm/def_loaded.json > ./projects/bm/def_18a.json +awk "{gsub(\"XXXXXX\",\"19\"); print}" ./projects/bm/def_loaded.json > ./projects/bm/def_19a.json + +awk "{gsub(\"YYYYYY\",\"false\"); print}" ./projects/bm/def_0a.json > ./projects/bm/def_0.json +awk "{gsub(\"YYYYYY\",\"false\"); print}" ./projects/bm/def_1a.json > ./projects/bm/def_1.json +awk "{gsub(\"YYYYYY\",\"false\"); print}" ./projects/bm/def_2a.json > ./projects/bm/def_2.json +awk "{gsub(\"YYYYYY\",\"false\"); print}" ./projects/bm/def_3a.json > ./projects/bm/def_3.json +awk "{gsub(\"YYYYYY\",\"false\"); print}" ./projects/bm/def_4a.json > ./projects/bm/def_4.json +awk "{gsub(\"YYYYYY\",\"false\"); print}" ./projects/bm/def_5a.json > ./projects/bm/def_5.json +awk "{gsub(\"YYYYYY\",\"false\"); print}" ./projects/bm/def_6a.json > ./projects/bm/def_6.json +awk "{gsub(\"YYYYYY\",\"false\"); print}" ./projects/bm/def_7a.json > ./projects/bm/def_7.json +awk 
"{gsub(\"YYYYYY\",\"false\"); print}" ./projects/bm/def_8a.json > ./projects/bm/def_8.json +awk "{gsub(\"YYYYYY\",\"false\"); print}" ./projects/bm/def_9a.json > ./projects/bm/def_9.json +awk "{gsub(\"YYYYYY\",\"false\"); print}" ./projects/bm/def_10a.json > ./projects/bm/def_10.json +awk "{gsub(\"YYYYYY\",\"false\"); print}" ./projects/bm/def_11a.json > ./projects/bm/def_11.json +awk "{gsub(\"YYYYYY\",\"false\"); print}" ./projects/bm/def_12a.json > ./projects/bm/def_12.json +awk "{gsub(\"YYYYYY\",\"false\"); print}" ./projects/bm/def_13a.json > ./projects/bm/def_13.json +awk "{gsub(\"YYYYYY\",\"false\"); print}" ./projects/bm/def_14a.json > ./projects/bm/def_14.json +awk "{gsub(\"YYYYYY\",\"false\"); print}" ./projects/bm/def_15a.json > ./projects/bm/def_15.json +awk "{gsub(\"YYYYYY\",\"true\"); print}" ./projects/bm/def_16a.json > ./projects/bm/def_16.json +awk "{gsub(\"YYYYYY\",\"true\"); print}" ./projects/bm/def_17a.json > ./projects/bm/def_17.json +awk "{gsub(\"YYYYYY\",\"true\"); print}" ./projects/bm/def_18a.json > ./projects/bm/def_18.json +awk "{gsub(\"YYYYYY\",\"true\"); print}" ./projects/bm/def_19a.json > ./projects/bm/def_19.json + + +awk "{gsub(\"XXXXXX\",\"0\"); print}" ./projects/bm/def.html > ./projects/bm/0.html +awk "{gsub(\"XXXXXX\",\"1\"); print}" ./projects/bm/def.html > ./projects/bm/1.html +awk "{gsub(\"XXXXXX\",\"2\"); print}" ./projects/bm/def.html > ./projects/bm/2.html +awk "{gsub(\"XXXXXX\",\"3\"); print}" ./projects/bm/def.html > ./projects/bm/3.html +awk "{gsub(\"XXXXXX\",\"4\"); print}" ./projects/bm/def.html > ./projects/bm/4.html +awk "{gsub(\"XXXXXX\",\"5\"); print}" ./projects/bm/def.html > ./projects/bm/5.html +awk "{gsub(\"XXXXXX\",\"6\"); print}" ./projects/bm/def.html > ./projects/bm/6.html +awk "{gsub(\"XXXXXX\",\"7\"); print}" ./projects/bm/def.html > ./projects/bm/7.html +awk "{gsub(\"XXXXXX\",\"8\"); print}" ./projects/bm/def.html > ./projects/bm/8.html +awk "{gsub(\"XXXXXX\",\"9\"); print}" ./projects/bm/def.html > ./projects/bm/9.html +awk "{gsub(\"XXXXXX\",\"10\"); print}" ./projects/bm/def.html > ./projects/bm/10.html +awk "{gsub(\"XXXXXX\",\"11\"); print}" ./projects/bm/def.html > ./projects/bm/11.html +awk "{gsub(\"XXXXXX\",\"12\"); print}" ./projects/bm/def.html > ./projects/bm/12.html +awk "{gsub(\"XXXXXX\",\"13\"); print}" ./projects/bm/def.html > ./projects/bm/13.html +awk "{gsub(\"XXXXXX\",\"14\"); print}" ./projects/bm/def.html > ./projects/bm/14.html +awk "{gsub(\"XXXXXX\",\"15\"); print}" ./projects/bm/def.html > ./projects/bm/15.html +awk "{gsub(\"XXXXXX\",\"16\"); print}" ./projects/bm/def.html > ./projects/bm/16.html +awk "{gsub(\"XXXXXX\",\"17\"); print}" ./projects/bm/def.html > ./projects/bm/17.html +awk "{gsub(\"XXXXXX\",\"18\"); print}" ./projects/bm/def.html > ./projects/bm/18.html +awk "{gsub(\"XXXXXX\",\"19\"); print}" ./projects/bm/def.html > ./projects/bm/19.html + + +dfx build --network local origyn_nft_reference +dfx build --network local origyn_sale_reference + +gzip .dfx/local/canisters/origyn_nft_reference/origyn_nft_reference.wasm -f +gzip .dfx/local/canisters/origyn_sale_reference/origyn_sale_reference.wasm -f + +#Replace below with your test principal +TEST_WALLET=$(echo "coapo-5z5t4-5azo7-idouv-jsvee-vzf6k-33ror-oncap-be2yg-6cavw-pqe") + + +dfx canister --network $env_network install $env_name --wasm .dfx/local/canisters/origyn_nft_reference/origyn_nft_reference.wasm.gz --mode=reinstall --argument "(record {owner =principal \"$ADMIN_PRINCIPAL\"; storage_space = null;})" +dfx canister --network $env_network 
install $env_name_sale --wasm .dfx/local/canisters/origyn_sale_reference/origyn_sale_reference.wasm.gz --mode=reinstall --argument "(record {owner=principal \"$ADMIN_PRINCIPAL\"; allocation_expiration = 450000000000; nft_gateway= opt principal \"$NFT_CANISTER_ID\"; sale_open_date=null; registration_date = null; end_date = null; required_lock_date=null})" + +node ./projects/deploy.js --meta=./projects/bm/def_collection_loaded.json --token_id="" --mint_target=$TEST_WALLET --nft_canister=$NFT_CANISTER_ID --mint=true --prod=$env_prod +node ./projects/deploy.js --meta=./projects/bm/def_0.json --token_id="bm-0" --mint_target=$TEST_WALLET --nft_canister=$NFT_CANISTER_ID --mint=true --prod=$env_prod +node ./projects/deploy.js --meta=./projects/bm/def_1.json --token_id="bm-1" --mint_target=$ADMIN_PRINCIPAL --nft_canister=$NFT_CANISTER_ID --mint=true --prod=$env_prod +node ./projects/deploy.js --meta=./projects/bm/def_2.json --token_id="bm-2" --mint_target=$TEST_WALLET --nft_canister=$NFT_CANISTER_ID --mint=true --prod=$env_prod +node ./projects/deploy.js --meta=./projects/bm/def_3.json --token_id="bm-3" --mint_target=$ADMIN_PRINCIPAL --nft_canister=$NFT_CANISTER_ID --mint=true --prod=$env_prod +node ./projects/deploy.js --meta=./projects/bm/def_4.json --token_id="bm-4" --mint_target=$TEST_WALLET --nft_canister=$NFT_CANISTER_ID --mint=true --prod=$env_prod +node ./projects/deploy.js --meta=./projects/bm/def_5.json --token_id="bm-5" --mint_target=$ADMIN_PRINCIPAL --nft_canister=$NFT_CANISTER_ID --mint=true --prod=$env_prod +node ./projects/deploy.js --meta=./projects/bm/def_6.json --token_id="bm-6" --mint_target=$TEST_WALLET --nft_canister=$NFT_CANISTER_ID --mint=true --prod=$env_prod +node ./projects/deploy.js --meta=./projects/bm/def_7.json --token_id="bm-7" --mint_target=$ADMIN_PRINCIPAL --nft_canister=$NFT_CANISTER_ID --mint=true --prod=$env_prod +node ./projects/deploy.js --meta=./projects/bm/def_8.json --token_id="bm-8" --mint_target=$TEST_WALLET --nft_canister=$NFT_CANISTER_ID --mint=false --prod=$env_prod +node ./projects/deploy.js --meta=./projects/bm/def_9.json --token_id="bm-9" --mint_target=$ADMIN_PRINCIPAL --nft_canister=$NFT_CANISTER_ID --mint=false --prod=$env_prod +node ./projects/deploy.js --meta=./projects/bm/def_10.json --token_id="bm-10" --mint_target=$ADMIN_PRINCIPAL --nft_canister=$NFT_CANISTER_ID --mint=false --prod=$env_prod +node ./projects/deploy.js --meta=./projects/bm/def_11.json --token_id="bm-11" --mint_target=$TEST_WALLET --nft_canister=$NFT_CANISTER_ID --mint=false --prod=$env_prod +node ./projects/deploy.js --meta=./projects/bm/def_12.json --token_id="bm-12" --mint_target=$ADMIN_PRINCIPAL --nft_canister=$NFT_CANISTER_ID --mint=false --prod=$env_prod +node ./projects/deploy.js --meta=./projects/bm/def_13.json --token_id="bm-13" --mint_target=$TEST_WALLET --nft_canister=$NFT_CANISTER_ID --mint=false --prod=$env_prod +node ./projects/deploy.js --meta=./projects/bm/def_14.json --token_id="bm-14" --mint_target=$ADMIN_PRINCIPAL --nft_canister=$NFT_CANISTER_ID --mint=false --prod=$env_prod +node ./projects/deploy.js --meta=./projects/bm/def_15.json --token_id="bm-15" --mint_target=$TEST_WALLET --nft_canister=$NFT_CANISTER_ID --mint=false --prod=$env_prod +node ./projects/deploy.js --meta=./projects/bm/def_16.json --token_id="bm-16" --mint_target=$ADMIN_PRINCIPAL --nft_canister=$NFT_CANISTER_ID --mint=true --prod=$env_prod +node ./projects/deploy.js --meta=./projects/bm/def_17.json --token_id="bm-17" --mint_target=$TEST_WALLET --nft_canister=$NFT_CANISTER_ID --mint=true 
--prod=$env_prod +node ./projects/deploy.js --meta=./projects/bm/def_18.json --token_id="bm-18" --mint_target=$TEST_WALLET --nft_canister=$NFT_CANISTER_ID --mint=false --prod=$env_prod +node ./projects/deploy.js --meta=./projects/bm/def_19.json --token_id="bm-19" --mint_target=$ADMIN_PRINCIPAL --nft_canister=$NFT_CANISTER_ID --mint=false --prod=$env_prod + +rm ./projects/bm/def_collection_build.json diff --git a/projects/bm/mystery-bm.gif b/projects/bm/mystery-bm.gif new file mode 100644 index 0000000..5bb4976 Binary files /dev/null and b/projects/bm/mystery-bm.gif differ diff --git a/projects/bm/principal_to_accountid.py b/projects/bm/principal_to_accountid.py new file mode 100644 index 0000000..acb50eb --- /dev/null +++ b/projects/bm/principal_to_accountid.py @@ -0,0 +1,37 @@ +import hashlib +import base64 +import math +import binascii +import sys + +"""Run `python3 principal_to_accountid.py {Principal}` + """ +if __name__ == '__main__': + # principal_id_str_in = "m7b5y-itxyr-mr2gt-kvadr-2dity-bh3n5-ff7bb-vvm2v-3ftew-5wjtg-2qe" + principal_id_str_in = sys.argv[1] + # print("converting {}".format(principal_id_str_in)) + subaccount = bytearray(32) + principal_id_str = principal_id_str_in.replace('-', '') + pad_length = math.ceil(len(principal_id_str) / 8) * \ + 8 - len(principal_id_str) + # print(principal_id_str) + principal_bytes = base64.b32decode( + principal_id_str.encode('ascii') + b'=' * pad_length, True, None) + principal_bytes = principal_bytes[4:] # remove CRC32 checksum bytes + ADS = b"\x0Aaccount-id" + h = hashlib.sha224() + h.update(ADS) + h.update(principal_bytes) + # print(subaccount) + h.update(subaccount) + + checksum = binascii.crc32(h.digest()) + checksum_bytes = checksum.to_bytes(4, byteorder='big') + + identifier = checksum_bytes + h.digest() + + # print(identifier) + + # print('account identifier {} of principal {}'.format( + # identifier.hex(), principal_id_str_in)) + print(identifier.hex()) \ No newline at end of file diff --git a/projects/bm/update_dapps_in_collection.py b/projects/bm/update_dapps_in_collection.py new file mode 100644 index 0000000..9ddcf87 --- /dev/null +++ b/projects/bm/update_dapps_in_collection.py @@ -0,0 +1,52 @@ +import json +import hashlib +import os + +dapps_folder="./dapps-latest-build/dist/" +dapps = [ + { + "file_name": "wallet.html", + "key_size": "dapp_wallet_size", + "key_hash": "dapp_wallet_hash", + }, + { + "file_name": "marketplace.html", + "key_size": "dapp_marketplace_size", + "key_hash": "dapp_marketplace_hash", + }, + { + "file_name": "ledger.html", + "key_size": "dapp_ledger_size", + "key_hash": "dapp_ledger_hash", + }, + { + "file_name": "library.html", + "key_size": "dapp_library_size", + "key_hash": "dapp_library_hash", + }, + { + "file_name": "nftData.html", + "key_size": "dapp_nftdata_size", + "key_hash": "dapp_nftdata_hash", + }, +] +def sha256(fname): + sha256_hash = hashlib.sha256() + with open(fname, "rb") as f: + for chunk in iter(lambda: f.read(4096), b""): + sha256_hash.update(chunk) + return sha256_hash.hexdigest() + +file = open("./projects/bm/def_collection.json") +def_collection = file.read() +file.seek(0) + +json_dump = json.dumps(def_collection) +for dapp in dapps: + file_size = os.stat(dapps_folder + dapp["file_name"]).st_size + file_hash = sha256(dapps_folder + dapp["file_name"]) + json_dump = json_dump.replace(dapp["key_size"], str(file_size)) + json_dump = json_dump.replace(dapp["key_hash"], file_hash) + +new_file = open("./projects/bm/def_collection_build.json", "w+") +new_file.write(json.loads(json_dump)) \ No 
newline at end of file diff --git a/projects/deploy.js b/projects/deploy.js new file mode 100644 index 0000000..5ac64b9 --- /dev/null +++ b/projects/deploy.js @@ -0,0 +1,211 @@ +import fs from 'fs'; +import minimist from 'minimist'; +import fetch from 'node-fetch'; +import hdkey from 'hdkey'; +import bip39 from 'bip39'; +import ICAgent from '@dfinity/agent'; +import { Principal } from '@dfinity/principal'; +import { Crypto } from '@peculiar/webcrypto'; +import { Secp256k1KeyIdentity } from '@dfinity/identity'; +import { idlFactory } from '../.dfx/local/canisters/origyn_nft_reference/origyn_nft_reference.did.js'; + +(async () => { + var argv = minimist(process.argv.slice(2)); + var seedfile = 'seed.txt'; + console.log('testing seed:', argv.seed); + if (argv.seed && argv.seed.length > 0) { + console.log('setting to alt seed'); + seedfile = argv.seed; + } + + const phrase = fs.readFileSync(seedfile).toString().trim(); + + console.log('the phrase', phrase); + + let identityFromSeed = async (phrase) => { + const seed = await bip39.mnemonicToSeed(phrase); + const root = hdkey.fromMasterSeed(seed); + const addrnode = root.derive("m/44'/223'/0'/0/0"); + + return Secp256k1KeyIdentity.fromSecretKey(addrnode.privateKey); + }; + + let identity = await identityFromSeed(phrase); + + console.dir(argv); + const NFT_ID = argv.nft_canister; + + console.log('NFTID', NFT_ID); + + global.crypto = new Crypto(); + + function getAgent() { + return new ICAgent.HttpAgent({ + fetch: fetch, + host: ICP_ENDPOINT, + identity: identity, //await window.plug.getIdentity() + }); + } + + //console.log("Anonymous Identity ", anonIdentity.getPrincipal().toText()); + + var ICP_ENDPOINT = 'http://localhost:8000'; + console.log('arge is ', argv.prod); + if (argv.prod == 'true') { + console.log('in prod'); + ICP_ENDPOINT = 'https://boundary.ic0.app'; + } + + const agent = getAgent(); + + if (argv.prod != 'true') { + agent.fetchRootKey(); + } + + console.log(agent); + //const actorClass = ICAgent.Actor.createActorClass(did); + + console.log('canister id', Principal.fromText(NFT_ID)); + console.log('factory', idlFactory); + const actor = ICAgent.Actor.createActor(idlFactory, { + agent: agent, + canisterId: Principal.fromText(NFT_ID), + }); + + console.log('the actor', actor); + console.log(actor.stage_nft_origyn); + + // console.log(chunks); + + const thejson = fs.readFileSync(argv.meta); + console.log('logging json'); + console.log(thejson); + const data = JSON.parse(thejson); + console.log('thejson', data); + + const iterateObj = (dupeObj) => { + var retObj = new Object(); + if (typeof dupeObj == 'object') { + if (typeof dupeObj.length == 'number') var retObj = new Array(); + + for (var objInd in dupeObj) { + if (dupeObj[objInd] == null) dupeObj[objInd] = 'Empty'; + if (typeof dupeObj[objInd] == 'object') { + retObj[objInd] = iterateObj(dupeObj[objInd]); + } else if (typeof dupeObj[objInd] == 'string') { + if (objInd == 'Principal') { + retObj[objInd] = Principal.fromText(dupeObj[objInd]); + } else if (objInd == 'Nat') { + retObj[objInd] = BigInt(dupeObj[objInd]); + } else { + retObj[objInd] = dupeObj[objInd]; + } + } else if (typeof dupeObj[objInd] == 'number') { + retObj[objInd] = dupeObj[objInd]; + } else if (typeof dupeObj[objInd] == 'boolean') { + dupeObj[objInd] == true + ? 
(retObj[objInd] = true) + : (retObj[objInd] = false); + } + } + } + return retObj; + }; + + const data2 = iterateObj(data); + + let nft = null; + + const stageNft = async (data2) => { + try { + nft = await actor.stage_nft_origyn(data2.meta); + return nft; + } catch (e) { + console.log(`There was an error while staging the nft:`); + console.log(e); + await new Promise((resolve) => setTimeout(resolve, 3000)); + return await stageNft(data2); + } + }; + nft = await stageNft(data2); + + console.log('the result of stage', nft); + + console.log('thelibrary', data.library); + + for (const this_item of data.library) { + let library_id = this_item.library_id; + const imageSource = this_item.library_file; + //console.log(this_item); + //if(imageSource.indexOf("social")>-1){ + const filedata = fs.readFileSync(imageSource); + + const SIZE_CHUNK = 2048000; // two megabytes + //const SIZE_CHUNK = 128; // two megabytes + + const chunks = []; + + for (var i = 0; i < filedata.byteLength / SIZE_CHUNK; i++) { + const startIndex = i * SIZE_CHUNK; + chunks.push(filedata.slice(startIndex, startIndex + SIZE_CHUNK)); + } + let results = []; + + const stageLibraryNft = async (content, token_id, i, library_id) => { + try { + const res = await actor.stage_library_nft_origyn({ + content: content, + token_id: token_id, + chunk: i, + filedata: { Empty: null }, + library_id: library_id, + }); + return res; + } catch (e) { + console.log( + `There was an error while staging the nft library:` + ); + console.log(e); + await new Promise((resolve) => setTimeout(resolve, 3000)); + return await stageLibraryNft(content, token_id, i, library_id); + } + }; + for (let i = 0; i < chunks.length; i++) { + const chnk = chunks[i]; + console.log('appending item ', i); + const result = await stageLibraryNft( + Array.from(chnk), + argv.token_id.toString(), + i, + library_id + ); + console.log(result); + } + //} + } + + //Promise.allSettled(results).then( (resultList) =>{ + + const mintNft = async (token_id, mint_target) => { + try { + const res = await actor.mint_nft_origyn(token_id, { + principal: Principal.fromText(mint_target), + }); + return res; + } catch (e) { + console.log(`There was an error while minting the nft:`); + console.log(e); + await new Promise((resolve) => setTimeout(resolve, 3000)); + return await mintNft(token_id, mint_target); + } + }; + if (argv.mint == 'true') { + console.log('minting'); + let result = await mintNft(argv.token_id.toString(), argv.mint_target); + console.log(result); + } + + console.log('install done.'); + process.exit(0); + //}); +})(); diff --git a/runners/nft_sale_runner.sh b/runners/nft_sale_runner.sh new file mode 100644 index 0000000..c8dd4ce --- /dev/null +++ b/runners/nft_sale_runner.sh @@ -0,0 +1,13 @@ +set -ex + + +dfx identity new sales_nft_ref || true +dfx identity use sales_nft_ref + +ADMIN_PRINCIPAL=$(dfx identity get-principal) +ADMIN_ACCOUNTID=$(dfx ledger account-id) + +echo $ADMIN_PRINCIPAL +echo $ADMIN_ACCOUNTID + +dfx deploy origyn_sale_reference --mode=reinstall --argument "(record {owner = principal \"$ADMIN_PRINCIPAL\"})" diff --git a/runners/test_runner.sh b/runners/test_runner.sh new file mode 100644 index 0000000..7c3ce24 --- /dev/null +++ b/runners/test_runner.sh @@ -0,0 +1,126 @@ +set -ex + +dfx identity new test_nft_ref || true +dfx identity use test_nft_ref + +ADMIN_PRINCIPAL=$(dfx identity get-principal) +ADMIN_ACCOUNTID=$(dfx ledger account-id) + +echo $ADMIN_PRINCIPAL +echo $ADMIN_ACCOUNTID + +dfx canister create test_runner +dfx canister create test_runner_nft +dfx 
canister create test_runner_nft_2 +dfx canister create test_runner_instant_transfer +dfx canister create test_runner_data +dfx canister create test_runner_utils +dfx canister create test_runner_collection +dfx canister create test_runner_storage +dfx canister create test_runner_sale +dfx canister create dfxledger +dfx canister create dfxledger2 +dfx canister create test_canister_factory +dfx canister create test_storage_factory + +DFX_LEDGER_CANISTER_ID=$(dfx canister id dfxledger) +DFX_LEDGER_ACCOUNT_ID=$(python3 principal_to_accountid.py $DFX_LEDGER_CANISTER_ID) + +DFX_LEDGER_CANISTER2_ID=$(dfx canister id dfxledger2) +DFX_LEDGER_ACCOUNT2_ID=$(python3 principal_to_accountid.py $DFX_LEDGER_CANISTER2_ID) + +TEST_RUNNER_CANISTER_ID=$(dfx canister id test_runner) +TEST_RUNNER_ACCOUNT_ID=$(python3 principal_to_accountid.py $TEST_RUNNER_CANISTER_ID) + +TEST_RUNNER_NFT_CANISTER_ID=$(dfx canister id test_runner_nft) +TEST_RUNNER_NFT_ACCOUNT_ID=$(python3 principal_to_accountid.py $TEST_RUNNER_NFT_CANISTER_ID) + +TEST_RUNNER_NFT_CANISTER_2_ID=$(dfx canister id test_runner_nft_2) +TEST_RUNNER_NFT_ACCOUNT_2_ID=$(python3 principal_to_accountid.py $TEST_RUNNER_NFT_CANISTER_2_ID) + +TEST_RUNNER_STORAGE_CANISTER_ID=$(dfx canister id test_runner_storage) +TEST_RUNNER_STORAGE_ACCOUNT_ID=$(python3 principal_to_accountid.py $TEST_RUNNER_STORAGE_CANISTER_ID) + +TEST_RUNNER_INSTANT_CANISTER_ID=$(dfx canister id test_runner_instant_transfer) +TEST_RUNNER_INSTANT_ACCOUNT_ID=$(python3 principal_to_accountid.py $TEST_RUNNER_INSTANT_CANISTER_ID) + +TEST_RUNNER_DATA_CANISTER_ID=$(dfx canister id test_runner_data) +TEST_RUNNER_DATA_ACCOUNT_ID=$(python3 principal_to_accountid.py $TEST_RUNNER_DATA_CANISTER_ID) + +TEST_RUNNER_UTILS_CANISTER_ID=$(dfx canister id test_runner_utils) +TEST_RUNNER_UTILS_ACCOUNT_ID=$(python3 principal_to_accountid.py $TEST_RUNNER_UTILS_CANISTER_ID) + +TEST_RUNNER_SALE_CANISTER_ID=$(dfx canister id test_runner_sale) +TEST_RUNNER_SALE_ACCOUNT_ID=$(python3 principal_to_accountid.py $TEST_RUNNER_SALE_CANISTER_ID) + + +TEST_RUNNER_COLLECTION_CANISTER_ID=$(dfx canister id test_runner_collection) +TEST_RUNNER_COLLECTION_ACCOUNT_ID=$(python3 principal_to_accountid.py $TEST_RUNNER_COLLECTION_CANISTER_ID) + +TEST_CANISTER_FACTORY_ID=$(dfx canister id test_canister_factory) +TEST_STORAGE_FACTORY_ID=$(dfx canister id test_storage_factory) + +dfx build test_runner_collection +dfx build test_runner +dfx build test_runner_nft +dfx build test_runner_nft_2 +dfx build test_runner_instant_transfer +dfx build test_runner_data +dfx build test_runner_utils +dfx build test_runner_storage +dfx build test_runner_sale +dfx build test_canister_factory +dfx build test_storage_factory +dfx build dfxledger +dfx build dfxledger2 + +gzip ./.dfx/local/canisters/test_runner_collection/test_runner_collection.wasm -f +gzip ./.dfx/local/canisters/test_runner/test_runner.wasm -f +gzip ./.dfx/local/canisters/test_runner_nft/test_runner_nft.wasm -f +gzip ./.dfx/local/canisters/test_runner_nft_2/test_runner_nft_2.wasm -f +gzip ./.dfx/local/canisters/test_runner_instant_transfer/test_runner_instant_transfer.wasm -f +gzip ./.dfx/local/canisters/test_runner_data/test_runner_data.wasm -f +gzip ./.dfx/local/canisters/test_runner_utils/test_runner_utils.wasm -f +gzip ./.dfx/local/canisters/test_runner_storage/test_runner_storage.wasm -f +gzip ./.dfx/local/canisters/test_canister_factory/test_canister_factory.wasm -f +gzip ./.dfx/local/canisters/test_storage_factory/test_storage_factory.wasm -f +gzip 
./.dfx/local/canisters/test_runner_sale/test_runner_sale.wasm -f + + +dfx canister install test_canister_factory --mode=reinstall --wasm ./.dfx/local/canisters/test_canister_factory/test_canister_factory.wasm.gz + +dfx canister install test_storage_factory --mode=reinstall --wasm ./.dfx/local/canisters/test_storage_factory/test_storage_factory.wasm.gz + + +dfx canister install test_runner_collection --mode=reinstall --wasm ./.dfx/local/canisters/test_runner_collection/test_runner_collection.wasm.gz --argument "(principal \"$DFX_LEDGER_CANISTER_ID\", principal \"$DFX_LEDGER_CANISTER2_ID\")" + +dfx canister install test_runner --mode=reinstall --wasm ./.dfx/local/canisters/test_runner/test_runner.wasm.gz --argument "(record{ canister_factory = principal \"$TEST_CANISTER_FACTORY_ID\"; storage_factory = principal \"$TEST_STORAGE_FACTORY_ID\"; dfx_ledger = opt (principal \"$DFX_LEDGER_CANISTER_ID\"); dfx_ledger2 = opt (principal \"$DFX_LEDGER_CANISTER2_ID\"); test_runner_nft = opt principal \"$TEST_RUNNER_NFT_CANISTER_ID\"; test_runner_nft_2 = opt principal \"$TEST_RUNNER_NFT_CANISTER_2_ID\"; test_runner_instant = opt(principal \"$TEST_RUNNER_INSTANT_CANISTER_ID\"); test_runner_data = opt(principal \"$TEST_RUNNER_DATA_CANISTER_ID\"); test_runner_utils = opt(principal \"$TEST_RUNNER_UTILS_CANISTER_ID\"); test_runner_collection = opt (principal \"$TEST_RUNNER_COLLECTION_CANISTER_ID\"); test_runner_storage= opt principal \"$TEST_RUNNER_STORAGE_CANISTER_ID\"; test_runner_sale = opt principal \"$TEST_RUNNER_SALE_CANISTER_ID\";})" + +dfx canister install test_runner_nft --wasm ./.dfx/local/canisters/test_runner_nft/test_runner_nft.wasm.gz --mode=reinstall --argument "(principal \"$DFX_LEDGER_CANISTER_ID\", principal \"$DFX_LEDGER_CANISTER2_ID\")" + +dfx canister install test_runner_nft_2 --mode=reinstall --wasm ./.dfx/local/canisters/test_runner_nft_2/test_runner_nft_2.wasm.gz --argument "(principal \"$DFX_LEDGER_CANISTER_ID\", principal \"$DFX_LEDGER_CANISTER2_ID\")" + +dfx canister install test_runner_storage --mode=reinstall --wasm ./.dfx/local/canisters/test_runner_storage/test_runner_storage.wasm.gz --argument "(principal \"$DFX_LEDGER_CANISTER_ID\", principal \"$DFX_LEDGER_CANISTER2_ID\")" + +dfx canister install test_runner_sale --mode=reinstall --wasm ./.dfx/local/canisters/test_runner_sale/test_runner_sale.wasm.gz --argument "(principal \"$DFX_LEDGER_CANISTER_ID\", principal \"$DFX_LEDGER_CANISTER2_ID\")" + +dfx canister install test_runner_utils --mode=reinstall --wasm ./.dfx/local/canisters/test_runner_utils/test_runner_utils.wasm.gz --argument "(principal \"$DFX_LEDGER_CANISTER_ID\", principal \"$DFX_LEDGER_CANISTER2_ID\")" + +dfx canister install test_runner_data --mode=reinstall --wasm ./.dfx/local/canisters/test_runner_data/test_runner_data.wasm.gz --argument "(principal \"$DFX_LEDGER_CANISTER_ID\", principal \"$DFX_LEDGER_CANISTER2_ID\")" + +dfx canister install test_runner_instant_transfer --mode=reinstall --wasm ./.dfx/local/canisters/test_runner_instant_transfer/test_runner_instant_transfer.wasm.gz --argument "(principal \"$DFX_LEDGER_CANISTER_ID\", principal \"$DFX_LEDGER_CANISTER2_ID\")" + +dfx canister install dfxledger --mode=reinstall --argument "(record { minting_account = \"$ADMIN_ACCOUNTID\"; initial_values = vec { record { \"$TEST_RUNNER_ACCOUNT_ID\"; record { e8s = 18446744073709551615: nat64 } } }; max_message_size_bytes = null; transaction_window = null; archive_options = opt record { trigger_threshold = 2000: nat64; num_blocks_to_archive = 1000: nat64; 
node_max_memory_size_bytes = null; max_message_size_bytes = null; controller_id = principal \"$TEST_RUNNER_CANISTER_ID\" }; send_whitelist = vec {};standard_whitelist = vec {};transfer_fee = null; token_symbol = null; token_name = null;admin = principal \"$TEST_RUNNER_CANISTER_ID\"})" + +dfx canister install dfxledger2 --mode=reinstall --argument "(record { minting_account = \"$ADMIN_ACCOUNTID\"; initial_values = vec { record { \"$TEST_RUNNER_ACCOUNT_ID\"; record { e8s = 18446744073709551615: nat64 } } }; max_message_size_bytes = null; transaction_window = null; archive_options = opt record { trigger_threshold = 2000: nat64; num_blocks_to_archive = 1000: nat64; node_max_memory_size_bytes = null; max_message_size_bytes = null; controller_id = principal \"$TEST_RUNNER_CANISTER_ID\" }; send_whitelist = vec {};standard_whitelist = vec {};transfer_fee = null; token_symbol = null; token_name = null;admin = principal \"$TEST_RUNNER_CANISTER_ID\"})" + + +TEST_RUNNER_ID=$(dfx canister id test_runner) + +echo $TEST_RUNNER_ID + +dfx canister call test_runner test +#dfx canister call test_runner_nft test +#dfx canister call test_runner_data_nft test +#dfx canister call test_runner_utils_nft test + diff --git a/runners/test_runner_auction.sh b/runners/test_runner_auction.sh new file mode 100644 index 0000000..735e4f5 --- /dev/null +++ b/runners/test_runner_auction.sh @@ -0,0 +1,62 @@ +set -ex + +dfx identity new test_nft_ref || true +dfx identity use test_nft_ref + +ADMIN_PRINCIPAL=$(dfx identity get-principal) +ADMIN_ACCOUNTID=$(dfx ledger account-id) + +echo $ADMIN_PRINCIPAL +echo $ADMIN_ACCOUNTID + +dfx canister create test_runner +dfx canister create test_runner_nft +dfx canister create test_canister_factory +dfx canister create test_storage_factory +dfx canister create dfxledger +dfx canister create dfxledger2 + + + + +DFX_LEDGER_CANISTER_ID=$(dfx canister id dfxledger) +DFX_LEDGER_ACCOUNT_ID=$(python3 principal_to_accountid.py $DFX_LEDGER_CANISTER_ID) + +DFX_LEDGER_CANISTER2_ID=$(dfx canister id dfxledger2) +DFX_LEDGER_ACCOUNT2_ID=$(python3 principal_to_accountid.py $DFX_LEDGER_CANISTER2_ID) + +TEST_RUNNER_CANISTER_ID=$(dfx canister id test_runner) +TEST_RUNNER_ACCOUNT_ID=$(python3 principal_to_accountid.py $TEST_RUNNER_CANISTER_ID) + +TEST_RUNNER_NFT_CANISTER_ID=$(dfx canister id test_runner_nft) +TEST_RUNNER_NFT_ACCOUNT_ID=$(python3 principal_to_accountid.py $TEST_RUNNER_NFT_CANISTER_ID) + + +TEST_CANISTER_FACTORY_ID=$(dfx canister id test_canister_factory) +TEST_STORAGE_FACTORY_ID=$(dfx canister id test_storage_factory) + + +dfx build test_runner +dfx build test_runner_nft +dfx build test_canister_factory +dfx build test_storage_factory + +gzip ./.dfx/local/canisters/test_runner/test_runner.wasm -f +gzip ./.dfx/local/canisters/test_canister_factory/test_canister_factory.wasm -f +gzip ./.dfx/local/canisters/test_storage_factory/test_storage_factory.wasm -f +gzip ./.dfx/local/canisters/test_runner_nft/test_runner_nft.wasm -f + +dfx canister install test_canister_factory --mode=reinstall --wasm ./.dfx/local/canisters/test_canister_factory/test_canister_factory.wasm.gz + +dfx canister install test_storage_factory --mode=reinstall --wasm ./.dfx/local/canisters/test_storage_factory/test_storage_factory.wasm.gz + +dfx canister install test_runner --mode=reinstall --wasm ./.dfx/local/canisters/test_runner/test_runner.wasm.gz --argument "(record { canister_factory = principal \"$TEST_CANISTER_FACTORY_ID\"; storage_factory = principal \"$TEST_STORAGE_FACTORY_ID\";dfx_ledger = opt principal 
\"$DFX_LEDGER_CANISTER_ID\"; test_runner_nft = opt principal \"$TEST_RUNNER_NFT_CANISTER_ID\"; test_runner_nft_2 = null; test_runner_instant = null; test_runner_data = null; test_runner_utils = null; test_runner_collection = null;test_runner_storage = null;})" + +dfx canister install test_runner_nft --wasm ./.dfx/local/canisters/test_runner_nft/test_runner_nft.wasm.gz --mode=reinstall --argument "(principal \"$DFX_LEDGER_CANISTER_ID\", principal \"$DFX_LEDGER_CANISTER2_ID\")" + +dfx canister install dfxledger --mode=reinstall --argument "(record { minting_account = \"$ADMIN_ACCOUNTID\"; initial_values = vec { record { \"$TEST_RUNNER_NFT_ACCOUNT_ID\"; record { e8s = 18446744073709551615: nat64 } } }; max_message_size_bytes = null; transaction_window = null; archive_options = opt record { trigger_threshold = 2000: nat64; num_blocks_to_archive = 1000: nat64; node_max_memory_size_bytes = null; max_message_size_bytes = null; controller_id = principal \"$TEST_RUNNER_CANISTER_ID\" }; send_whitelist = vec {};standard_whitelist = vec {};transfer_fee = null; token_symbol = null; token_name = null;admin = principal \"$TEST_RUNNER_CANISTER_ID\"})" +dfx canister install dfxledger2 --mode=reinstall --argument "(record { minting_account = \"$ADMIN_ACCOUNTID\"; initial_values = vec { record { \"$TEST_RUNNER_NFT_ACCOUNT_ID\"; record { e8s = 18446744073709551615: nat64 } } }; max_message_size_bytes = null; transaction_window = null; archive_options = opt record { trigger_threshold = 2000: nat64; num_blocks_to_archive = 1000: nat64; node_max_memory_size_bytes = null; max_message_size_bytes = null; controller_id = principal \"$TEST_RUNNER_CANISTER_ID\" }; send_whitelist = vec {};standard_whitelist = vec {};transfer_fee = null; token_symbol = null; token_name = null;admin = principal \"$TEST_RUNNER_CANISTER_ID\"})" + + +dfx canister call test_runner test + diff --git a/runners/test_runner_auction_quick.sh b/runners/test_runner_auction_quick.sh new file mode 100644 index 0000000..9d562e3 --- /dev/null +++ b/runners/test_runner_auction_quick.sh @@ -0,0 +1,60 @@ +set -ex + +dfx identity new test_nft_ref || true +dfx identity use test_nft_ref + +ADMIN_PRINCIPAL=$(dfx identity get-principal) +ADMIN_ACCOUNTID=$(dfx ledger account-id) + +echo $ADMIN_PRINCIPAL +echo $ADMIN_ACCOUNTID + +#dfx canister create test_runner +#dfx canister create test_runner_nft +#dfx canister create test_canister_factory +#dfx canister create test_storage_factory +#dfx canister create dfxledger + + + + +DFX_LEDGER_CANISTER_ID=$(dfx canister id dfxledger) +DFX_LEDGER_ACCOUNT_ID=$(python3 principal_to_accountid.py $DFX_LEDGER_CANISTER_ID) + +DFX_LEDGER_CANISTER2_ID=$(dfx canister id dfxledger2) +DFX_LEDGER_ACCOUNT2_ID=$(python3 principal_to_accountid.py $DFX_LEDGER_CANISTER2_ID) + +TEST_RUNNER_CANISTER_ID=$(dfx canister id test_runner) +TEST_RUNNER_ACCOUNT_ID=$(python3 principal_to_accountid.py $TEST_RUNNER_CANISTER_ID) + +TEST_RUNNER_NFT_CANISTER_ID=$(dfx canister id test_runner_nft) +TEST_RUNNER_NFT_ACCOUNT_ID=$(python3 principal_to_accountid.py $TEST_RUNNER_NFT_CANISTER_ID) + + +TEST_CANISTER_FACTORY_ID=$(dfx canister id test_canister_factory) +TEST_STORAGE_FACTORY_ID=$(dfx canister id test_storage_factory) + + +#dfx build test_runner +dfx build test_runner_nft +#dfx build test_canister_factory +#dfx build test_storage_factory + +#gzip ./.dfx/local/canisters/test_runner/test_runner.wasm -f +#gzip ./.dfx/local/canisters/test_canister_factory/test_canister_factory.wasm -f +#gzip 
./.dfx/local/canisters/test_storage_factory/test_storage_factory.wasm -f +gzip ./.dfx/local/canisters/test_runner_nft/test_runner_nft.wasm -f + +#dfx canister install test_canister_factory --mode=reinstall --wasm ./.dfx/local/canisters/test_canister_factory/test_canister_factory.wasm.gz + +#dfx canister install test_storage_factory --mode=reinstall --wasm ./.dfx/local/canisters/test_storage_factory/test_storage_factory.wasm.gz + +#dfx canister install test_runner --mode=reinstall --wasm ./.dfx/local/canisters/test_runner/test_runner.wasm.gz --argument "(record { canister_factory = principal \"$TEST_CANISTER_FACTORY_ID\"; storage_factory = principal \"$TEST_STORAGE_FACTORY_ID\";dfx_ledger = opt principal \"$DFX_LEDGER_CANISTER_ID\"; test_runner_nft = opt principal \"$TEST_RUNNER_NFT_CANISTER_ID\"; test_runner_nft_2 = null; test_runner_instant = null; test_runner_data = null; test_runner_utils = null; test_runner_collection = null;test_runner_storage = null;})" + +dfx canister install test_runner_nft --wasm ./.dfx/local/canisters/test_runner_nft/test_runner_nft.wasm.gz --mode=reinstall --argument "(principal \"$DFX_LEDGER_CANISTER_ID\", principal \"$DFX_LEDGER_CANISTER2_ID\")" + +#dfx canister install dfxledger --mode=reinstall --argument "(record { minting_account = \"$ADMIN_ACCOUNTID\"; initial_values = vec { record { \"$TEST_RUNNER_NFT_ACCOUNT_ID\"; record { e8s = 18446744073709551615: nat64 } } }; max_message_size_bytes = null; transaction_window = null; archive_options = opt record { trigger_threshold = 2000: nat64; num_blocks_to_archive = 1000: nat64; node_max_memory_size_bytes = null; max_message_size_bytes = null; controller_id = principal \"$TEST_RUNNER_CANISTER_ID\" }; send_whitelist = vec {};standard_whitelist = vec {};transfer_fee = null; token_symbol = null; token_name = null;admin = principal \"$TEST_RUNNER_CANISTER_ID\"})" + + +dfx canister call test_runner test + diff --git a/runners/test_runner_collection.sh b/runners/test_runner_collection.sh new file mode 100644 index 0000000..d1e9fe5 --- /dev/null +++ b/runners/test_runner_collection.sh @@ -0,0 +1,80 @@ +set -ex + +dfx identity new test_nft_ref || true +dfx identity use test_nft_ref + +ADMIN_PRINCIPAL=$(dfx identity get-principal) +ADMIN_ACCOUNTID=$(dfx ledger account-id) + +echo $ADMIN_PRINCIPAL +echo $ADMIN_ACCOUNTID + + +dfx canister create test_runner_collection +dfx canister create dfxledger +dfx canister create dfxledger2 +dfx canister create test_canister_factory +dfx canister create test_storage_factory + +DFX_LEDGER_CANISTER_ID=$(dfx canister id dfxledger) +DFX_LEDGER_ACCOUNT_ID=$(python3 principal_to_accountid.py $DFX_LEDGER_CANISTER_ID) + +DFX_LEDGER_CANISTER2_ID=$(dfx canister id dfxledger2) +DFX_LEDGER_ACCOUNT2_ID=$(python3 principal_to_accountid.py $DFX_LEDGER_CANISTER2_ID) + +TEST_RUNNER_CANISTER_ID=$(dfx canister id test_runner) +TEST_RUNNER_ACCOUNT_ID=$(python3 principal_to_accountid.py $TEST_RUNNER_CANISTER_ID) + +TEST_RUNNER_COLLECTION_CANISTER_ID=$(dfx canister id test_runner_collection) +TEST_RUNNER_COLLECTION_ACCOUNT_ID=$(python3 principal_to_accountid.py $TEST_RUNNER_COLLECTION_CANISTER_ID) + +TEST_CANISTER_FACTORY_ID=$(dfx canister id test_canister_factory) +TEST_STORAGE_FACTORY_ID=$(dfx canister id test_storage_factory) + +dfx build test_runner_collection +dfx build test_runner +dfx build test_canister_factory +dfx build test_storage_factory +#dfx build test_runner_nft +#dfx build test_runner_nft_2 +#dfx build test_runner_instant_transfer +#dfx build test_runner_data +#dfx build test_runner_utils 
+ +gzip ./.dfx/local/canisters/test_runner/test_runner.wasm -f +gzip ./.dfx/local/canisters/test_canister_factory/test_canister_factory.wasm -f +gzip ./.dfx/local/canisters/test_storage_factory/test_storage_factory.wasm -f +gzip ./.dfx/local/canisters/test_runner_collection/test_runner_collection.wasm -f + + +dfx canister install test_canister_factory --mode=reinstall --wasm ./.dfx/local/canisters/test_canister_factory/test_canister_factory.wasm.gz + +dfx canister install test_storage_factory --mode=reinstall --wasm ./.dfx/local/canisters/test_storage_factory/test_storage_factory.wasm.gz + + +dfx canister install test_runner_collection --mode=reinstall --wasm ./.dfx/local/canisters/test_runner_collection/test_runner_collection.wasm.gz --argument "(principal \"$DFX_LEDGER_CANISTER_ID\", principal \"$DFX_LEDGER_CANISTER2_ID\")" + +dfx canister install test_runner --mode=reinstall --wasm ./.dfx/local/canisters/test_runner/test_runner.wasm.gz --argument "(record { canister_factory = principal \"$TEST_CANISTER_FACTORY_ID\"; storage_factory = principal \"$TEST_STORAGE_FACTORY_ID\";dfx_ledger = null; test_runner_nft = null; test_runner_nft_2 = null; test_runner_instant = null; test_runner_data = null; test_runner_utils = null; test_runner_collection = opt principal \"$TEST_RUNNER_COLLECTION_CANISTER_ID\";})" + +#dfx canister install test_runner_nft --mode=reinstall --argument "(principal \"$DFX_LEDGER_CANISTER_ID\", principal \"$DFX_LEDGER_CANISTER2_ID\")" + +#dfx canister install test_runner_nft_2 --mode=reinstall --argument "(principal \"$DFX_LEDGER_CANISTER_ID\", principal \"$DFX_LEDGER_CANISTER2_ID\")" + +#dfx canister install test_runner_utils --mode=reinstall --argument "(principal \"$DFX_LEDGER_CANISTER_ID\", principal \"$DFX_LEDGER_CANISTER2_ID\")" + +#dfx canister install test_runner_data --mode=reinstall --argument "(principal \"$DFX_LEDGER_CANISTER_ID\", principal \"$DFX_LEDGER_CANISTER2_ID\")" + +#dfx canister install test_runner_instant_transfer --mode=reinstall --argument "(principal \"$DFX_LEDGER_CANISTER_ID\", principal \"$DFX_LEDGER_CANISTER2_ID\")" + +#dfx canister install dfxledger --mode=reinstall --argument "(record { minting_account = \"$ADMIN_ACCOUNTID\"; initial_values = vec { record { \"$TEST_RUNNER_ACCOUNT_ID\"; record { e8s = 18446744073709551615: nat64 } } }; max_message_size_bytes = null; transaction_window = null; archive_options = opt record { trigger_threshold = 2000: nat64; num_blocks_to_archive = 1000: nat64; node_max_memory_size_bytes = null; max_message_size_bytes = null; controller_id = principal \"$TEST_RUNNER_CANISTER_ID\" }; send_whitelist = vec {};standard_whitelist = vec {};transfer_fee = null; token_symbol = null; token_name = null;admin = principal \"$TEST_RUNNER_CANISTER_ID\"})" + + +TEST_RUNNER_ID=$(dfx canister id test_runner) + +echo $TEST_RUNNER_ID + +dfx canister call test_runner test +#dfx canister call test_runner_nft test +#dfx canister call test_runner_data_nft test +#dfx canister call test_runner_utils_nft test + diff --git a/runners/test_runner_instant.sh b/runners/test_runner_instant.sh new file mode 100644 index 0000000..5ead484 --- /dev/null +++ b/runners/test_runner_instant.sh @@ -0,0 +1,88 @@ +set -ex + +dfx identity new test_nft_ref || true +dfx identity use test_nft_ref + +ADMIN_PRINCIPAL=$(dfx identity get-principal) +ADMIN_ACCOUNTID=$(dfx ledger account-id) + +echo $ADMIN_PRINCIPAL +echo $ADMIN_ACCOUNTID + + +dfx canister create test_runner_instant_transfer +dfx canister create dfxledger +dfx canister create dfxledger2 +dfx canister 
create test_canister_factory +dfx canister create test_storage_factory +dfx canister create test_runner +dfx canister create test_runner_nft_2 + +DFX_LEDGER_CANISTER_ID=$(dfx canister id dfxledger) +DFX_LEDGER_ACCOUNT_ID=$(python3 principal_to_accountid.py $DFX_LEDGER_CANISTER_ID) + +DFX_LEDGER_CANISTER2_ID=$(dfx canister id dfxledger2) +DFX_LEDGER_ACCOUNT2_ID=$(python3 principal_to_accountid.py $DFX_LEDGER_CANISTER2_ID) + +TEST_RUNNER_CANISTER_ID=$(dfx canister id test_runner) +TEST_RUNNER_ACCOUNT_ID=$(python3 principal_to_accountid.py $TEST_RUNNER_CANISTER_ID) + +TEST_RUNNER_NFT_CANISTER_2_ID=$(dfx canister id test_runner_nft_2) +TEST_RUNNER__NFT_ACCOUNT_2_ID=$(python3 principal_to_accountid.py $TEST_RUNNER_NFT_CANISTER_2_ID) + +TEST_RUNNER_INSTANT_CANISTER_ID=$(dfx canister id test_runner_instant_transfer) +TEST_RUNNER_INSTANT_ACCOUNT_ID=$(python3 principal_to_accountid.py $TEST_RUNNER_NFT_CANISTER_2_ID) + +TEST_CANISTER_FACTORY_ID=$(dfx canister id test_canister_factory) +TEST_STORAGE_FACTORY_ID=$(dfx canister id test_storage_factory) + +dfx build test_runner_instant_transfer +dfx build test_runner +dfx build test_canister_factory +dfx build test_storage_factory +#dfx build test_runner_nft +#dfx build test_runner_nft_2 +#dfx build test_runner_instant_transfer +#dfx build test_runner_data +#dfx build test_runner_utils + + +gzip ./.dfx/local/canisters/test_runner/test_runner.wasm -f +gzip ./.dfx/local/canisters/test_canister_factory/test_canister_factory.wasm -f +gzip ./.dfx/local/canisters/test_storage_factory/test_storage_factory.wasm -f +gzip ./.dfx/local/canisters/test_runner_instant_transfer/test_runner_instant_transfer.wasm -f + +dfx canister install test_canister_factory --mode=reinstall --wasm ./.dfx/local/canisters/test_canister_factory/test_canister_factory.wasm.gz + +dfx canister install test_storage_factory --mode=reinstall --wasm ./.dfx/local/canisters/test_storage_factory/test_storage_factory.wasm.gz + + + +dfx canister install test_runner_instant_transfer --mode=reinstall --wasm ./.dfx/local/canisters/test_runner_instant_transfer/test_runner_instant_transfer.wasm.gz --argument "(principal \"$DFX_LEDGER_CANISTER_ID\", principal \"$DFX_LEDGER_CANISTER2_ID\")" + +dfx canister install test_runner --mode=reinstall --wasm ./.dfx/local/canisters/test_runner/test_runner.wasm.gz --argument "(record { canister_factory = principal \"$TEST_CANISTER_FACTORY_ID\"; storage_factory = principal \"$TEST_STORAGE_FACTORY_ID\";dfx_ledger = opt principal \"$DFX_LEDGER_CANISTER_ID\";dfx_ledger2 = opt principal \"$DFX_LEDGER_CANISTER2_ID\"; test_runner_nft = null; test_runner_nft_2 = null; test_runner_instant = opt principal \"$TEST_RUNNER_INSTANT_CANISTER_ID\"; test_runner_data = null; test_runner_utils = null; test_runner_collection = null;})" + +#dfx canister install test_runner_nft --mode=reinstall --argument "(principal \"$DFX_LEDGER_CANISTER_ID\", principal \"$DFX_LEDGER_CANISTER2_ID\")" + +#dfx canister install test_runner_nft_2 --mode=reinstall --argument "(principal \"$DFX_LEDGER_CANISTER_ID\", principal \"$DFX_LEDGER_CANISTER2_ID\")" + +#dfx canister install test_runner_utils --mode=reinstall --argument "(principal \"$DFX_LEDGER_CANISTER_ID\", principal \"$DFX_LEDGER_CANISTER2_ID\")" + +#dfx canister install test_runner_data --mode=reinstall --argument "(principal \"$DFX_LEDGER_CANISTER_ID\", principal \"$DFX_LEDGER_CANISTER2_ID\")" + +#dfx canister install test_runner_instant_transfer --mode=reinstall --argument "(principal \"$DFX_LEDGER_CANISTER_ID\", principal 
\"$DFX_LEDGER_CANISTER2_ID\")" + +dfx canister install dfxledger --mode=reinstall --argument "(record { minting_account = \"$ADMIN_ACCOUNTID\"; initial_values = vec { record { \"$TEST_RUNNER_ACCOUNT_ID\"; record { e8s = 18446744073709551615: nat64 } } }; max_message_size_bytes = null; transaction_window = null; archive_options = opt record { trigger_threshold = 2000: nat64; num_blocks_to_archive = 1000: nat64; node_max_memory_size_bytes = null; max_message_size_bytes = null; controller_id = principal \"$TEST_RUNNER_CANISTER_ID\" }; send_whitelist = vec {};standard_whitelist = vec {};transfer_fee = null; token_symbol = null; token_name = null;admin = principal \"$TEST_RUNNER_CANISTER_ID\"})" + +dfx canister install dfxledger2 --mode=reinstall --argument "(record { minting_account = \"$ADMIN_ACCOUNTID\"; initial_values = vec { record { \"$TEST_RUNNER_ACCOUNT_ID\"; record { e8s = 18446744073709551615: nat64 } } }; max_message_size_bytes = null; transaction_window = null; archive_options = opt record { trigger_threshold = 2000: nat64; num_blocks_to_archive = 1000: nat64; node_max_memory_size_bytes = null; max_message_size_bytes = null; controller_id = principal \"$TEST_RUNNER_CANISTER_ID\" }; send_whitelist = vec {};standard_whitelist = vec {};transfer_fee = null; token_symbol = null; token_name = null;admin = principal \"$TEST_RUNNER_CANISTER_ID\"})" + + +TEST_RUNNER_ID=$(dfx canister id test_runner) + +echo $TEST_RUNNER_ID + +dfx canister call test_runner test +#dfx canister call test_runner_nft test +#dfx canister call test_runner_data_nft test +#dfx canister call test_runner_utils_nft test + diff --git a/runners/test_runner_nft.sh b/runners/test_runner_nft.sh new file mode 100644 index 0000000..d01b2cb --- /dev/null +++ b/runners/test_runner_nft.sh @@ -0,0 +1,84 @@ +set -ex + +dfx identity new test_nft_ref || true +dfx identity use test_nft_ref + +ADMIN_PRINCIPAL=$(dfx identity get-principal) +ADMIN_ACCOUNTID=$(dfx ledger account-id) + +echo $ADMIN_PRINCIPAL +echo $ADMIN_ACCOUNTID + + +dfx canister create test_runner_nft +dfx canister create dfxledger +dfx canister create dfxledger2 +dfx canister create test_canister_factory +dfx canister create test_storage_factory +dfx canister create test_runner + +DFX_LEDGER_CANISTER_ID=$(dfx canister id dfxledger) +DFX_LEDGER_ACCOUNT_ID=$(python3 principal_to_accountid.py $DFX_LEDGER_CANISTER_ID) + +DFX_LEDGER_CANISTER2_ID=$(dfx canister id dfxledger2) +DFX_LEDGER_ACCOUNT2_ID=$(python3 principal_to_accountid.py $DFX_LEDGER_CANISTER2_ID) + +TEST_RUNNER_CANISTER_ID=$(dfx canister id test_runner) +TEST_RUNNER_ACCOUNT_ID=$(python3 principal_to_accountid.py $TEST_RUNNER_CANISTER_ID) + + +TEST_RUNNER_NFT_CANISTER_ID=$(dfx canister id test_runner_nft) +TEST_RUNNER__NFT_ACCOUNT_ID=$(python3 principal_to_accountid.py $TEST_RUNNER_NFT_CANISTER_ID) + +TEST_CANISTER_FACTORY_ID=$(dfx canister id test_canister_factory) +TEST_STORAGE_FACTORY_ID=$(dfx canister id test_storage_factory) + +dfx build test_runner_nft +dfx build test_runner +dfx build test_canister_factory +dfx build test_storage_factory +#dfx build test_runner_nft +#dfx build test_runner_nft_2 +#dfx build test_runner_instant_transfer +#dfx build test_runner_data +#dfx build test_runner_utils + +gzip ./.dfx/local/canisters/test_runner/test_runner.wasm -f +gzip ./.dfx/local/canisters/test_canister_factory/test_canister_factory.wasm -f +gzip ./.dfx/local/canisters/test_storage_factory/test_storage_factory.wasm -f +gzip ./.dfx/local/canisters/test_runner_nft/test_runner_nft.wasm -f + +dfx canister install 
test_canister_factory --mode=reinstall --wasm ./.dfx/local/canisters/test_canister_factory/test_canister_factory.wasm.gz + +dfx canister install test_storage_factory --mode=reinstall --wasm ./.dfx/local/canisters/test_storage_factory/test_storage_factory.wasm.gz + + + +dfx canister install test_runner_nft --mode=reinstall --wasm ./.dfx/local/canisters/test_runner_nft/test_runner_nft.wasm.gz --argument "(principal \"$DFX_LEDGER_CANISTER_ID\", principal \"$DFX_LEDGER_CANISTER2_ID\")" + +dfx canister install test_runner --mode=reinstall --wasm ./.dfx/local/canisters/test_runner/test_runner.wasm.gz --argument "(record { canister_factory = principal \"$TEST_CANISTER_FACTORY_ID\"; storage_factory = principal \"$TEST_STORAGE_FACTORY_ID\";dfx_ledger = opt principal \"$DFX_LEDGER_CANISTER_ID\"; dfx_ledger2 = opt principal \"$DFX_LEDGER_CANISTER2_ID\";test_runner_nft = opt principal \"$TEST_RUNNER_NFT_CANISTER_ID\";})" + +#dfx canister install test_runner_nft --mode=reinstall --argument "(principal \"$DFX_LEDGER_CANISTER_ID\", principal \"$DFX_LEDGER_CANISTER2_ID\")" + +#dfx canister install test_runner_nft_2 --mode=reinstall --argument "(principal \"$DFX_LEDGER_CANISTER_ID\", principal \"$DFX_LEDGER_CANISTER2_ID\")" + +#dfx canister install test_runner_utils --mode=reinstall --argument "(principal \"$DFX_LEDGER_CANISTER_ID\", principal \"$DFX_LEDGER_CANISTER2_ID\")" + +#dfx canister install test_runner_data --mode=reinstall --argument "(principal \"$DFX_LEDGER_CANISTER_ID\", principal \"$DFX_LEDGER_CANISTER2_ID\")" + +#dfx canister install test_runner_instant_transfer --mode=reinstall --argument "(principal \"$DFX_LEDGER_CANISTER_ID\", principal \"$DFX_LEDGER_CANISTER2_ID\")" + +dfx canister install dfxledger --mode=reinstall --argument "(record { minting_account = \"$ADMIN_ACCOUNTID\"; initial_values = vec { record { \"$TEST_RUNNER_ACCOUNT_ID\"; record { e8s = 18446744073709551615: nat64 } } }; max_message_size_bytes = null; transaction_window = null; archive_options = opt record { trigger_threshold = 2000: nat64; num_blocks_to_archive = 1000: nat64; node_max_memory_size_bytes = null; max_message_size_bytes = null; controller_id = principal \"$TEST_RUNNER_CANISTER_ID\" }; send_whitelist = vec {};standard_whitelist = vec {};transfer_fee = null; token_symbol = null; token_name = null;admin = principal \"$TEST_RUNNER_CANISTER_ID\"})" + +dfx canister install dfxledger2 --mode=reinstall --argument "(record { minting_account = \"$ADMIN_ACCOUNTID\"; initial_values = vec { record { \"$TEST_RUNNER_ACCOUNT_ID\"; record { e8s = 18446744073709551615: nat64 } } }; max_message_size_bytes = null; transaction_window = null; archive_options = opt record { trigger_threshold = 2000: nat64; num_blocks_to_archive = 1000: nat64; node_max_memory_size_bytes = null; max_message_size_bytes = null; controller_id = principal \"$TEST_RUNNER_CANISTER_ID\" }; send_whitelist = vec {};standard_whitelist = vec {};transfer_fee = null; token_symbol = null; token_name = null;admin = principal \"$TEST_RUNNER_CANISTER_ID\"})" + + +TEST_RUNNER_ID=$(dfx canister id test_runner) + +echo $TEST_RUNNER_ID + +dfx canister call test_runner test +#dfx canister call test_runner_nft test +#dfx canister call test_runner_data_nft test +#dfx canister call test_runner_utils_nft test + diff --git a/runners/test_runner_nft_2.sh b/runners/test_runner_nft_2.sh new file mode 100644 index 0000000..f86218b --- /dev/null +++ b/runners/test_runner_nft_2.sh @@ -0,0 +1,84 @@ +set -ex + +dfx identity new test_nft_ref || true +dfx identity use test_nft_ref + 
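+# This runner creates the nft_2 test canister, both test ledgers, the factories,
+# and the test runner; resolves their ids; builds and gzips the wasms; reinstalls
+# the canisters; and finally calls the test_runner "test" method.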
+ADMIN_PRINCIPAL=$(dfx identity get-principal) +ADMIN_ACCOUNTID=$(dfx ledger account-id) + +echo $ADMIN_PRINCIPAL +echo $ADMIN_ACCOUNTID + + +dfx canister create test_runner_nft_2 +dfx canister create dfxledger +dfx canister create dfxledger2 +dfx canister create test_canister_factory +dfx canister create test_storage_factory +dfx canister create test_runner + +DFX_LEDGER_CANISTER_ID=$(dfx canister id dfxledger) +DFX_LEDGER_ACCOUNT_ID=$(python3 principal_to_accountid.py $DFX_LEDGER_CANISTER_ID) + +DFX_LEDGER_CANISTER2_ID=$(dfx canister id dfxledger2) +DFX_LEDGER_ACCOUNT2_ID=$(python3 principal_to_accountid.py $DFX_LEDGER_CANISTER2_ID) + +TEST_RUNNER_CANISTER_ID=$(dfx canister id test_runner) +TEST_RUNNER_ACCOUNT_ID=$(python3 principal_to_accountid.py $TEST_RUNNER_CANISTER_ID) + + +TEST_RUNNER_NFT_CANISTER_2_ID=$(dfx canister id test_runner_nft_2) +TEST_RUNNER__NFT_ACCOUNT_2_ID=$(python3 principal_to_accountid.py $TEST_RUNNER_NFT_CANISTER_2_ID) + +TEST_CANISTER_FACTORY_ID=$(dfx canister id test_canister_factory) +TEST_STORAGE_FACTORY_ID=$(dfx canister id test_storage_factory) + +dfx build test_runner_nft_2 +dfx build test_runner +dfx build test_canister_factory +dfx build test_storage_factory +#dfx build test_runner_nft +#dfx build test_runner_nft_2 +#dfx build test_runner_instant_transfer +#dfx build test_runner_data +#dfx build test_runner_utils + +gzip ./.dfx/local/canisters/test_runner/test_runner.wasm -f +gzip ./.dfx/local/canisters/test_canister_factory/test_canister_factory.wasm -f +gzip ./.dfx/local/canisters/test_storage_factory/test_storage_factory.wasm -f +gzip ./.dfx/local/canisters/test_runner_nft_2/test_runner_nft_2.wasm -f + +dfx canister install test_canister_factory --mode=reinstall --wasm ./.dfx/local/canisters/test_canister_factory/test_canister_factory.wasm.gz + +dfx canister install test_storage_factory --mode=reinstall --wasm ./.dfx/local/canisters/test_storage_factory/test_storage_factory.wasm.gz + + + +dfx canister install test_runner_nft_2 --mode=reinstall --wasm ./.dfx/local/canisters/test_runner_nft_2/test_runner_nft_2.wasm.gz --argument "(principal \"$DFX_LEDGER_CANISTER_ID\", principal \"$DFX_LEDGER_CANISTER2_ID\")" + +dfx canister install test_runner --mode=reinstall --wasm ./.dfx/local/canisters/test_runner/test_runner.wasm.gz --argument "(record { canister_factory = principal \"$TEST_CANISTER_FACTORY_ID\"; storage_factory = principal \"$TEST_STORAGE_FACTORY_ID\";dfx_ledger = opt principal \"$DFX_LEDGER_CANISTER_ID\"; dfx_ledger2 = opt principal \"$DFX_LEDGER_CANISTER2_ID\";test_runner_nft_2 = opt principal \"$TEST_RUNNER_NFT_CANISTER_2_ID\";})" + +#dfx canister install test_runner_nft --mode=reinstall --argument "(principal \"$DFX_LEDGER_CANISTER_ID\", principal \"$DFX_LEDGER_CANISTER2_ID\")" + +#dfx canister install test_runner_nft_2 --mode=reinstall --argument "(principal \"$DFX_LEDGER_CANISTER_ID\", principal \"$DFX_LEDGER_CANISTER2_ID\")" + +#dfx canister install test_runner_utils --mode=reinstall --argument "(principal \"$DFX_LEDGER_CANISTER_ID\", principal \"$DFX_LEDGER_CANISTER2_ID\")" + +#dfx canister install test_runner_data --mode=reinstall --argument "(principal \"$DFX_LEDGER_CANISTER_ID\", principal \"$DFX_LEDGER_CANISTER2_ID\")" + +#dfx canister install test_runner_instant_transfer --mode=reinstall --argument "(principal \"$DFX_LEDGER_CANISTER_ID\", principal \"$DFX_LEDGER_CANISTER2_ID\")" + +dfx canister install dfxledger --mode=reinstall --argument "(record { minting_account = \"$ADMIN_ACCOUNTID\"; initial_values = vec { record { 
\"$TEST_RUNNER_ACCOUNT_ID\"; record { e8s = 18446744073709551615: nat64 } } }; max_message_size_bytes = null; transaction_window = null; archive_options = opt record { trigger_threshold = 2000: nat64; num_blocks_to_archive = 1000: nat64; node_max_memory_size_bytes = null; max_message_size_bytes = null; controller_id = principal \"$TEST_RUNNER_CANISTER_ID\" }; send_whitelist = vec {};standard_whitelist = vec {};transfer_fee = null; token_symbol = null; token_name = null;admin = principal \"$TEST_RUNNER_CANISTER_ID\"})" + +dfx canister install dfxledger2 --mode=reinstall --argument "(record { minting_account = \"$ADMIN_ACCOUNTID\"; initial_values = vec { record { \"$TEST_RUNNER_ACCOUNT_ID\"; record { e8s = 18446744073709551615: nat64 } } }; max_message_size_bytes = null; transaction_window = null; archive_options = opt record { trigger_threshold = 2000: nat64; num_blocks_to_archive = 1000: nat64; node_max_memory_size_bytes = null; max_message_size_bytes = null; controller_id = principal \"$TEST_RUNNER_CANISTER_ID\" }; send_whitelist = vec {};standard_whitelist = vec {};transfer_fee = null; token_symbol = null; token_name = null;admin = principal \"$TEST_RUNNER_CANISTER_ID\"})" + + +TEST_RUNNER_ID=$(dfx canister id test_runner) + +echo $TEST_RUNNER_ID + +dfx canister call test_runner test +#dfx canister call test_runner_nft test +#dfx canister call test_runner_data_nft test +#dfx canister call test_runner_utils_nft test + diff --git a/runners/test_runner_nft_2_quick.sh b/runners/test_runner_nft_2_quick.sh new file mode 100644 index 0000000..be29b09 --- /dev/null +++ b/runners/test_runner_nft_2_quick.sh @@ -0,0 +1,81 @@ +set -ex + +dfx identity new test_nft_ref || true +dfx identity use test_nft_ref + +ADMIN_PRINCIPAL=$(dfx identity get-principal) +ADMIN_ACCOUNTID=$(dfx ledger account-id) + +echo $ADMIN_PRINCIPAL +echo $ADMIN_ACCOUNTID + + +#dfx canister create test_runner_nft_2 +#dfx canister create dfxledger +#dfx canister create dfxledger2 +#dfx canister create test_canister_factory +#dfx canister create test_storage_factory + +DFX_LEDGER_CANISTER_ID=$(dfx canister id dfxledger) +DFX_LEDGER_ACCOUNT_ID=$(python3 principal_to_accountid.py $DFX_LEDGER_CANISTER_ID) + +DFX_LEDGER_CANISTER2_ID=$(dfx canister id dfxledger2) +DFX_LEDGER_ACCOUNT2_ID=$(python3 principal_to_accountid.py $DFX_LEDGER_CANISTER2_ID) + +TEST_RUNNER_CANISTER_ID=$(dfx canister id test_runner) +TEST_RUNNER_ACCOUNT_ID=$(python3 principal_to_accountid.py $TEST_RUNNER_CANISTER_ID) + + +TEST_RUNNER_NFT_CANISTER_2_ID=$(dfx canister id test_runner_nft_2) +TEST_RUNNER__NFT_ACCOUNT_2_ID=$(python3 principal_to_accountid.py $TEST_RUNNER_NFT_CANISTER_2_ID) + +TEST_CANISTER_FACTORY_ID=$(dfx canister id test_canister_factory) +TEST_STORAGE_FACTORY_ID=$(dfx canister id test_storage_factory) + +dfx build test_runner_nft_2 +#dfx build test_runner +dfx build test_canister_factory +#dfx build test_storage_factory +#dfx build test_runner_nft +#dfx build test_runner_nft_2 +#dfx build test_runner_instant_transfer +#dfx build test_runner_data +#dfx build test_runner_utils + +#gzip ./.dfx/local/canisters/test_runner/test_runner.wasm -f +gzip ./.dfx/local/canisters/test_canister_factory/test_canister_factory.wasm -f +#gzip ./.dfx/local/canisters/test_storage_factory/test_storage_factory.wasm -f +gzip ./.dfx/local/canisters/test_runner_nft_2/test_runner_nft_2.wasm -f + +dfx canister install test_canister_factory --mode=reinstall --wasm ./.dfx/local/canisters/test_canister_factory/test_canister_factory.wasm.gz + +#dfx canister install 
test_storage_factory --mode=reinstall --wasm ./.dfx/local/canisters/test_storage_factory/test_storage_factory.wasm.gz + + + +dfx canister install test_runner_nft_2 --mode=reinstall --wasm ./.dfx/local/canisters/test_runner_nft_2/test_runner_nft_2.wasm.gz --argument "(principal \"$DFX_LEDGER_CANISTER_ID\", principal \"$DFX_LEDGER_CANISTER2_ID\")" + +#dfx canister install test_runner --mode=reinstall --wasm ./.dfx/local/canisters/test_runner/test_runner.wasm.gz --argument "(record { canister_factory = principal \"$TEST_CANISTER_FACTORY_ID\"; storage_factory = principal \"$TEST_STORAGE_FACTORY_ID\";dfx_ledger = opt principal \"$DFX_LEDGER_CANISTER_ID\"; test_runner_nft = null; test_runner_nft_2 = opt principal \"$TEST_RUNNER_NFT_CANISTER_2_ID\"; test_runner_instant = null; test_runner_data = null; test_runner_utils = null; test_runner_collection = null;})" + +#dfx canister install test_runner_nft --mode=reinstall --argument "(principal \"$DFX_LEDGER_CANISTER_ID\", principal \"$DFX_LEDGER_CANISTER2_ID\")" + +#dfx canister install test_runner_nft_2 --mode=reinstall --argument "(principal \"$DFX_LEDGER_CANISTER_ID\", principal \"$DFX_LEDGER_CANISTER2_ID\")" + +#dfx canister install test_runner_utils --mode=reinstall --argument "(principal \"$DFX_LEDGER_CANISTER_ID\", principal \"$DFX_LEDGER_CANISTER2_ID\")" + +#dfx canister install test_runner_data --mode=reinstall --argument "(principal \"$DFX_LEDGER_CANISTER_ID\", principal \"$DFX_LEDGER_CANISTER2_ID\")" + +#dfx canister install test_runner_instant_transfer --mode=reinstall --argument "(principal \"$DFX_LEDGER_CANISTER_ID\", principal \"$DFX_LEDGER_CANISTER2_ID\")" + +#dfx canister install dfxledger --mode=reinstall --argument "(record { minting_account = \"$ADMIN_ACCOUNTID\"; initial_values = vec { record { \"$TEST_RUNNER_ACCOUNT_ID\"; record { e8s = 18446744073709551615: nat64 } } }; max_message_size_bytes = null; transaction_window = null; archive_options = opt record { trigger_threshold = 2000: nat64; num_blocks_to_archive = 1000: nat64; node_max_memory_size_bytes = null; max_message_size_bytes = null; controller_id = principal \"$TEST_RUNNER_CANISTER_ID\" }; send_whitelist = vec {};standard_whitelist = vec {};transfer_fee = null; token_symbol = null; token_name = null;admin = principal \"$TEST_RUNNER_CANISTER_ID\"})" + + +TEST_RUNNER_ID=$(dfx canister id test_runner) + +echo $TEST_RUNNER_ID + +dfx canister call test_runner test +#dfx canister call test_runner_nft test +#dfx canister call test_runner_data_nft test +#dfx canister call test_runner_utils_nft test + diff --git a/runners/test_runner_sale.sh b/runners/test_runner_sale.sh new file mode 100644 index 0000000..1128ec5 --- /dev/null +++ b/runners/test_runner_sale.sh @@ -0,0 +1,65 @@ +set -ex + +dfx identity new test_nft_ref || true +dfx identity use test_nft_ref + +ADMIN_PRINCIPAL=$(dfx identity get-principal) +ADMIN_ACCOUNTID=$(dfx ledger account-id) + +echo $ADMIN_PRINCIPAL +echo $ADMIN_ACCOUNTID + +dfx canister create test_runner +dfx canister create test_runner_sale +dfx canister create test_canister_factory +dfx canister create test_storage_factory +dfx canister create dfxledger +dfx canister create dfxledger2 + + +DFX_LEDGER_CANISTER_ID=$(dfx canister id dfxledger) +DFX_LEDGER_ACCOUNT_ID=$(python3 principal_to_accountid.py $DFX_LEDGER_CANISTER_ID) + +DFX_LEDGER_CANISTER2_ID=$(dfx canister id dfxledger2) +DFX_LEDGER_ACCOUNT2_ID=$(python3 principal_to_accountid.py $DFX_LEDGER_CANISTER2_ID) + +TEST_RUNNER_CANISTER_ID=$(dfx canister id test_runner) +TEST_RUNNER_ACCOUNT_ID=$(python3 
principal_to_accountid.py $TEST_RUNNER_CANISTER_ID) + +TEST_RUNNER_SALE_CANISTER_ID=$(dfx canister id test_runner_sale) +TEST_RUNNER_SALE_ACCOUNT_ID=$(python3 principal_to_accountid.py $TEST_RUNNER_SALE_CANISTER_ID) + + +TEST_CANISTER_FACTORY_ID=$(dfx canister id test_canister_factory) +TEST_STORAGE_FACTORY_ID=$(dfx canister id test_storage_factory) + + +dfx build test_runner +dfx build test_runner_sale +dfx build test_canister_factory +dfx build test_storage_factory +dfx build dfxledger +dfx build dfxledger2 + +gzip ./.dfx/local/canisters/test_runner_sale/test_runner_sale.wasm -f +gzip ./.dfx/local/canisters/test_runner/test_runner.wasm -f +gzip ./.dfx/local/canisters/test_canister_factory/test_canister_factory.wasm -f +gzip ./.dfx/local/canisters/test_storage_factory/test_storage_factory.wasm -f + + +dfx canister install test_canister_factory --mode=reinstall --wasm ./.dfx/local/canisters/test_canister_factory/test_canister_factory.wasm.gz + +dfx canister install test_storage_factory --mode=reinstall --wasm ./.dfx/local/canisters/test_storage_factory/test_storage_factory.wasm.gz + + +dfx canister install test_runner --mode=reinstall --wasm ./.dfx/local/canisters/test_runner/test_runner.wasm.gz --argument "(record { canister_factory = principal \"$TEST_CANISTER_FACTORY_ID\"; storage_factory = principal \"$TEST_STORAGE_FACTORY_ID\";dfx_ledger = opt principal \"$DFX_LEDGER_CANISTER_ID\"; dfx_ledger2 = opt principal \"$DFX_LEDGER_CANISTER2_ID\";test_runner_nft = null; test_runner_nft_2 = null; test_runner_instant = null; test_runner_data = null; test_runner_utils = null; test_runner_collection = null;test_runner_storage = null; test_runner_sale = opt principal \"$TEST_RUNNER_SALE_CANISTER_ID\";})" + +dfx canister install test_runner_sale --wasm ./.dfx/local/canisters/test_runner_sale/test_runner_sale.wasm.gz --mode=reinstall --argument "(principal \"$DFX_LEDGER_CANISTER_ID\", principal \"$DFX_LEDGER_CANISTER2_ID\")" + +dfx canister install dfxledger --mode=reinstall --argument "(record { minting_account = \"$ADMIN_ACCOUNTID\"; initial_values = vec { record { \"$TEST_RUNNER_ACCOUNT_ID\"; record { e8s = 18446744073709551615: nat64 } } }; max_message_size_bytes = null; transaction_window = null; archive_options = opt record { trigger_threshold = 2000: nat64; num_blocks_to_archive = 1000: nat64; node_max_memory_size_bytes = null; max_message_size_bytes = null; controller_id = principal \"$TEST_RUNNER_CANISTER_ID\" }; send_whitelist = vec {};standard_whitelist = vec {};transfer_fee = null; token_symbol = null; token_name = null;admin = principal \"$TEST_RUNNER_CANISTER_ID\"})" + +dfx canister install dfxledger2 --mode=reinstall --argument "(record { minting_account = \"$ADMIN_ACCOUNTID\"; initial_values = vec { record { \"$TEST_RUNNER_ACCOUNT_ID\"; record { e8s = 18446744073709551615: nat64 } } }; max_message_size_bytes = null; transaction_window = null; archive_options = opt record { trigger_threshold = 2000: nat64; num_blocks_to_archive = 1000: nat64; node_max_memory_size_bytes = null; max_message_size_bytes = null; controller_id = principal \"$TEST_RUNNER_CANISTER_ID\" }; send_whitelist = vec {};standard_whitelist = vec {};transfer_fee = null; token_symbol = null; token_name = null;admin = principal \"$TEST_RUNNER_CANISTER_ID\"})" + + +dfx canister call test_runner test + diff --git a/runners/test_runner_sale_quick.sh b/runners/test_runner_sale_quick.sh new file mode 100644 index 0000000..72a1c2a --- /dev/null +++ b/runners/test_runner_sale_quick.sh @@ -0,0 +1,54 @@ +set -ex + +dfx identity 
new test_nft_ref || true +dfx identity use test_nft_ref + +ADMIN_PRINCIPAL=$(dfx identity get-principal) +ADMIN_ACCOUNTID=$(dfx ledger account-id) + +echo $ADMIN_PRINCIPAL +echo $ADMIN_ACCOUNTID + +#dfx canister create test_runner +dfx canister create test_runner_sale +#dfx canister create test_canister_factory +#dfx canister create test_storage_factory +#dfx canister create dfxledger + + +DFX_LEDGER_CANISTER_ID=$(dfx canister id dfxledger) +DFX_LEDGER_ACCOUNT_ID=$(python3 principal_to_accountid.py $DFX_LEDGER_CANISTER_ID) + +DFX_LEDGER_CANISTER2_ID=$(dfx canister id dfxledger2) +DFX_LEDGER_ACCOUNT2_ID=$(python3 principal_to_accountid.py $DFX_LEDGER_CANISTER2_ID) + +TEST_RUNNER_CANISTER_ID=$(dfx canister id test_runner) +TEST_RUNNER_ACCOUNT_ID=$(python3 principal_to_accountid.py $TEST_RUNNER_CANISTER_ID) + +TEST_RUNNER_SALE_CANISTER_ID=$(dfx canister id test_runner_sale) +TEST_RUNNER_SALE_ACCOUNT_ID=$(python3 principal_to_accountid.py $TEST_RUNNER_SALE_CANISTER_ID) + + +TEST_CANISTER_FACTORY_ID=$(dfx canister id test_canister_factory) +TEST_STORAGE_FACTORY_ID=$(dfx canister id test_storage_factory) + + +dfx build test_runner +dfx build test_runner_sale +#dfx build test_canister_factory +#dfx build test_storage_factory +gzip ./.dfx/local/canisters/test_runner_sale/test_runner_sale.wasm -f + +#dfx canister install test_canister_factory --mode=reinstall + +#dfx canister install test_storage_factory --mode=reinstall + +dfx canister install test_runner --mode=reinstall --argument "(record { canister_factory = principal \"$TEST_CANISTER_FACTORY_ID\"; storage_factory = principal \"$TEST_STORAGE_FACTORY_ID\";dfx_ledger = opt principal \"$DFX_LEDGER_CANISTER_ID\"; dfx_ledger2 = opt principal \"$DFX_LEDGER_CANISTER2_ID\";test_runner_nft = null; test_runner_nft_2 = null; test_runner_instant = null; test_runner_data = null; test_runner_utils = null; test_runner_collection = null;test_runner_storage = null; test_runner_sale = opt principal \"$TEST_RUNNER_SALE_CANISTER_ID\";})" + +dfx canister install test_runner_sale --wasm ./.dfx/local/canisters/test_runner_sale/test_runner_sale.wasm.gz --mode=reinstall --argument "(principal \"$DFX_LEDGER_CANISTER_ID\", principal \"$DFX_LEDGER_CANISTER2_ID\")" + +#dfx canister install dfxledger --mode=reinstall --argument "(record { minting_account = \"$ADMIN_ACCOUNTID\"; initial_values = vec { record { \"$TEST_RUNNER_ACCOUNT_ID\"; record { e8s = 18446744073709551615: nat64 } } }; max_message_size_bytes = null; transaction_window = null; archive_options = opt record { trigger_threshold = 2000: nat64; num_blocks_to_archive = 1000: nat64; node_max_memory_size_bytes = null; max_message_size_bytes = null; controller_id = principal \"$TEST_RUNNER_CANISTER_ID\" }; send_whitelist = vec {};standard_whitelist = vec {};transfer_fee = null; token_symbol = null; token_name = null;admin = principal \"$TEST_RUNNER_CANISTER_ID\"})" + + +dfx canister call test_runner test + diff --git a/runners/test_runner_storage.sh b/runners/test_runner_storage.sh new file mode 100644 index 0000000..4dfae3f --- /dev/null +++ b/runners/test_runner_storage.sh @@ -0,0 +1,61 @@ +set -ex + +dfx identity new test_nft_ref || true +dfx identity use test_nft_ref + +ADMIN_PRINCIPAL=$(dfx identity get-principal) +ADMIN_ACCOUNTID=$(dfx ledger account-id) + +echo $ADMIN_PRINCIPAL +echo $ADMIN_ACCOUNTID + +dfx canister create test_runner +dfx canister create test_runner_storage +dfx canister create dfxledger +dfx canister create dfxledger2 +dfx canister create test_canister_factory +dfx canister create 
test_storage_factory + +DFX_LEDGER_CANISTER_ID=$(dfx canister id dfxledger) +DFX_LEDGER_ACCOUNT_ID=$(python3 principal_to_accountid.py $DFX_LEDGER_CANISTER_ID) + +DFX_LEDGER_CANISTER2_ID=$(dfx canister id dfxledger2) +DFX_LEDGER_ACCOUNT2_ID=$(python3 principal_to_accountid.py $DFX_LEDGER_CANISTER2_ID) +TEST_RUNNER_CANISTER_ID=$(dfx canister id test_runner) +TEST_RUNNER_ACCOUNT_ID=$(python3 principal_to_accountid.py $TEST_RUNNER_CANISTER_ID) +#TEST_RUNNER_NFT_CANISTER_ID=$(dfx canister id test_runner_nft) +#TEST_RUNNER__NFT_ACCOUNT_ID=$(python3 principal_to_accountid.py $TEST_RUNNER_NFT_CANISTER_ID) + +#TEST_RUNNER_DATA_CANISTER_ID=$(dfx canister id test_runner_data) +#TEST_RUNNER_DATA_ACCOUNT_ID=$(python3 principal_to_accountid.py $TEST_RUNNER_DATA_CANISTER_ID) + +TEST_RUNNER_STORAGE_CANISTER_ID=$(dfx canister id test_runner_storage) +TEST_RUNNER_STORAGE_ACCOUNT_ID=$(python3 principal_to_accountid.py $TEST_RUNNER_STORAGE_CANISTER_ID) + +TEST_CANISTER_FACTORY_ID=$(dfx canister id test_canister_factory) +TEST_STORAGE_FACTORY_ID=$(dfx canister id test_storage_factory) + + +dfx build test_runner +dfx build test_runner_storage +dfx build test_canister_factory +dfx build test_storage_factory + +gzip ./.dfx/local/canisters/test_runner/test_runner.wasm -f +gzip ./.dfx/local/canisters/test_canister_factory/test_canister_factory.wasm -f +gzip ./.dfx/local/canisters/test_storage_factory/test_storage_factory.wasm -f +gzip ./.dfx/local/canisters/test_runner_storage/test_runner_storage.wasm -f + + +dfx canister install test_canister_factory --mode=reinstall --wasm ./.dfx/local/canisters/test_canister_factory/test_canister_factory.wasm.gz + +dfx canister install test_storage_factory --mode=reinstall --wasm ./.dfx/local/canisters/test_storage_factory/test_storage_factory.wasm.gz + + +dfx canister install test_runner --mode=reinstall --wasm ./.dfx/local/canisters/test_runner/test_runner.wasm.gz --argument "(record { canister_factory = principal \"$TEST_CANISTER_FACTORY_ID\"; storage_factory = principal \"$TEST_STORAGE_FACTORY_ID\";dfx_ledger = null; test_runner_nft = null; test_runner_nft_2 = null; test_runner_instant = null; test_runner_data = null; test_runner_utils = null; test_runner_collection = null;test_runner_storage = opt principal \"$TEST_RUNNER_STORAGE_CANISTER_ID\";})" + +dfx canister install test_runner_storage --mode=reinstall --wasm ./.dfx/local/canisters/test_runner_storage/test_runner_storage.wasm.gz --argument "(principal \"$DFX_LEDGER_CANISTER_ID\", principal \"$DFX_LEDGER_CANISTER2_ID\")" + +#dfx canister install dfxledger --mode=reinstall --argument "(record { minting_account = \"$ADMIN_ACCOUNTID\"; initial_values = vec { record { \"$TEST_RUNNER__NFT_ACCOUNT_ID\"; record { e8s = 18446744073709551615: nat64 } } }; max_message_size_bytes = null; transaction_window = null; archive_options = opt record { trigger_threshold = 2000: nat64; num_blocks_to_archive = 1000: nat64; node_max_memory_size_bytes = null; max_message_size_bytes = null; controller_id = principal \"$TEST_RUNNER_CANISTER_ID\" }; send_whitelist = vec {};standard_whitelist = vec {};transfer_fee = null; token_symbol = null; token_name = null;admin = principal \"$TEST_RUNNER_CANISTER_ID\"})" + +dfx canister call test_runner test diff --git a/runners/test_runner_storage_quick.sh b/runners/test_runner_storage_quick.sh new file mode 100644 index 0000000..f0ac426 --- /dev/null +++ b/runners/test_runner_storage_quick.sh @@ -0,0 +1,61 @@ +set -ex + +dfx identity new test_nft_ref || true +dfx identity use test_nft_ref + 
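+# Quick variant: canister creation and the test_runner/ledger installs stay
+# commented out, so an earlier full run of test_runner_storage.sh is assumed;
+# only the factories and the storage test canister are rebuilt and reinstalled
+# before test_runner's "test" method is called.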
+ADMIN_PRINCIPAL=$(dfx identity get-principal) +ADMIN_ACCOUNTID=$(dfx ledger account-id) + +echo $ADMIN_PRINCIPAL +echo $ADMIN_ACCOUNTID + +#dfx canister create test_runner +#dfx canister create test_runner_storage +#dfx canister create dfxledger +#dfx canister create dfxledger2 +#dfx canister create test_canister_factory +#dfx canister create test_storage_factory + +DFX_LEDGER_CANISTER_ID=$(dfx canister id dfxledger) +DFX_LEDGER_ACCOUNT_ID=$(python3 principal_to_accountid.py $DFX_LEDGER_CANISTER_ID) + +DFX_LEDGER_CANISTER2_ID=$(dfx canister id dfxledger2) +DFX_LEDGER_ACCOUNT2_ID=$(python3 principal_to_accountid.py $DFX_LEDGER_CANISTER2_ID) +TEST_RUNNER_CANISTER_ID=$(dfx canister id test_runner) +TEST_RUNNER_ACCOUNT_ID=$(python3 principal_to_accountid.py $TEST_RUNNER_CANISTER_ID) +#TEST_RUNNER_NFT_CANISTER_ID=$(dfx canister id test_runner_nft) +#TEST_RUNNER__NFT_ACCOUNT_ID=$(python3 principal_to_accountid.py $TEST_RUNNER_NFT_CANISTER_ID) + +#TEST_RUNNER_DATA_CANISTER_ID=$(dfx canister id test_runner_data) +#TEST_RUNNER_DATA_ACCOUNT_ID=$(python3 principal_to_accountid.py $TEST_RUNNER_DATA_CANISTER_ID) + +TEST_RUNNER_STORAGE_CANISTER_ID=$(dfx canister id test_runner_storage) +TEST_RUNNER_STORAGE_ACCOUNT_ID=$(python3 principal_to_accountid.py $TEST_RUNNER_STORAGE_CANISTER_ID) + +TEST_CANISTER_FACTORY_ID=$(dfx canister id test_canister_factory) +TEST_STORAGE_FACTORY_ID=$(dfx canister id test_storage_factory) + + +#dfx build test_runner +dfx build test_runner_storage +dfx build test_canister_factory +dfx build test_storage_factory + +#gzip ./.dfx/local/canisters/test_runner/test_runner.wasm -f +gzip ./.dfx/local/canisters/test_canister_factory/test_canister_factory.wasm -f +gzip ./.dfx/local/canisters/test_storage_factory/test_storage_factory.wasm -f +gzip ./.dfx/local/canisters/test_runner_storage/test_runner_storage.wasm -f + + +dfx canister install test_canister_factory --mode=reinstall --wasm ./.dfx/local/canisters/test_canister_factory/test_canister_factory.wasm.gz + +dfx canister install test_storage_factory --mode=reinstall --wasm ./.dfx/local/canisters/test_storage_factory/test_storage_factory.wasm.gz + + +#dfx canister install test_runner --mode=reinstall --wasm ./.dfx/local/canisters/test_runner/test_runner.wasm.gz --argument "(record { canister_factory = principal \"$TEST_CANISTER_FACTORY_ID\"; storage_factory = principal \"$TEST_STORAGE_FACTORY_ID\";dfx_ledger = null; test_runner_nft = null; test_runner_nft_2 = null; test_runner_instant = null; test_runner_data = null; test_runner_utils = null; test_runner_collection = null;test_runner_storage = opt principal \"$TEST_RUNNER_STORAGE_CANISTER_ID\";})" + +dfx canister install test_runner_storage --mode=reinstall --wasm ./.dfx/local/canisters/test_runner_storage/test_runner_storage.wasm.gz --argument "(principal \"$DFX_LEDGER_CANISTER_ID\", principal \"$DFX_LEDGER_CANISTER2_ID\")" + +#dfx canister install dfxledger --mode=reinstall --argument "(record { minting_account = \"$ADMIN_ACCOUNTID\"; initial_values = vec { record { \"$TEST_RUNNER__NFT_ACCOUNT_ID\"; record { e8s = 18446744073709551615: nat64 } } }; max_message_size_bytes = null; transaction_window = null; archive_options = opt record { trigger_threshold = 2000: nat64; num_blocks_to_archive = 1000: nat64; node_max_memory_size_bytes = null; max_message_size_bytes = null; controller_id = principal \"$TEST_RUNNER_CANISTER_ID\" }; send_whitelist = vec {};standard_whitelist = vec {};transfer_fee = null; token_symbol = null; token_name = null;admin = principal 
\"$TEST_RUNNER_CANISTER_ID\"})" + +dfx canister call test_runner test diff --git a/runners/test_runner_utils.sh b/runners/test_runner_utils.sh new file mode 100644 index 0000000..a2aa153 --- /dev/null +++ b/runners/test_runner_utils.sh @@ -0,0 +1,63 @@ +set -ex + +dfx identity new test_nft_ref || true +dfx identity use test_nft_ref + +ADMIN_PRINCIPAL=$(dfx identity get-principal) +ADMIN_ACCOUNTID=$(dfx ledger account-id) + +echo $ADMIN_PRINCIPAL +echo $ADMIN_ACCOUNTID + +dfx canister create test_runner +dfx canister create test_runner_utils +dfx canister create test_canister_factory +dfx canister create test_storage_factory +dfx canister create dfxledger +dfx canister create dfxledger2 + + +DFX_LEDGER_CANISTER_ID=$(dfx canister id dfxledger) +DFX_LEDGER_ACCOUNT_ID=$(python3 principal_to_accountid.py $DFX_LEDGER_CANISTER_ID) + +DFX_LEDGER_CANISTER2_ID=$(dfx canister id dfxledger2) +DFX_LEDGER_ACCOUNT2_ID=$(python3 principal_to_accountid.py $DFX_LEDGER_CANISTER2_ID) + +TEST_RUNNER_CANISTER_ID=$(dfx canister id test_runner) +TEST_RUNNER_ACCOUNT_ID=$(python3 principal_to_accountid.py $TEST_RUNNER_CANISTER_ID) + +TEST_RUNNER_UTIL_CANISTER_ID=$(dfx canister id test_runner_utils) +TEST_RUNNER_UTIL_ACCOUNT_ID=$(python3 principal_to_accountid.py $TEST_RUNNER_UTIL_CANISTER_ID) + + +TEST_CANISTER_FACTORY_ID=$(dfx canister id test_canister_factory) +TEST_STORAGE_FACTORY_ID=$(dfx canister id test_storage_factory) + + +dfx build test_runner +dfx build test_runner_utils +dfx build test_canister_factory +dfx build test_storage_factory + +gzip ./.dfx/local/canisters/test_runner_utils/test_runner_utils.wasm -f +gzip ./.dfx/local/canisters/test_runner/test_runner.wasm -f +gzip ./.dfx/local/canisters/test_canister_factory/test_canister_factory.wasm -f +gzip ./.dfx/local/canisters/test_storage_factory/test_storage_factory.wasm -f + + +dfx canister install test_canister_factory --mode=reinstall --wasm ./.dfx/local/canisters/test_canister_factory/test_canister_factory.wasm.gz + +dfx canister install test_storage_factory --mode=reinstall --wasm ./.dfx/local/canisters/test_storage_factory/test_storage_factory.wasm.gz + + +dfx canister install test_runner --mode=reinstall --wasm ./.dfx/local/canisters/test_runner/test_runner.wasm.gz --argument "(record { canister_factory = principal \"$TEST_CANISTER_FACTORY_ID\"; storage_factory = principal \"$TEST_STORAGE_FACTORY_ID\";dfx_ledger = opt principal \"$DFX_LEDGER_CANISTER_ID\"; test_runner_nft = null; test_runner_nft_2 = null; test_runner_instant = null; test_runner_data = null; test_runner_utils = opt principal \"$TEST_RUNNER_UTIL_CANISTER_ID\"; test_runner_collection = null;test_runner_storage = null; test_runner_sale = null;})" + +dfx canister install test_runner_utils --wasm ./.dfx/local/canisters/test_runner_utils/test_runner_utils.wasm.gz --mode=reinstall --argument "(principal \"$DFX_LEDGER_CANISTER_ID\", principal \"$DFX_LEDGER_CANISTER2_ID\")" + +dfx canister install dfxledger --mode=reinstall --argument "(record { minting_account = \"$ADMIN_ACCOUNTID\"; initial_values = vec { record { \"$TEST_RUNNER_ACCOUNT_ID\"; record { e8s = 18446744073709551615: nat64 } } }; max_message_size_bytes = null; transaction_window = null; archive_options = opt record { trigger_threshold = 2000: nat64; num_blocks_to_archive = 1000: nat64; node_max_memory_size_bytes = null; max_message_size_bytes = null; controller_id = principal \"$TEST_RUNNER_CANISTER_ID\" }; send_whitelist = vec {};standard_whitelist = vec {};transfer_fee = null; token_symbol = null; token_name = null;admin = 
principal \"$TEST_RUNNER_CANISTER_ID\"})" + +dfx canister install dfxledger2 --mode=reinstall --argument "(record { minting_account = \"$ADMIN_ACCOUNTID\"; initial_values = vec { record { \"$TEST_RUNNER_ACCOUNT_ID\"; record { e8s = 18446744073709551615: nat64 } } }; max_message_size_bytes = null; transaction_window = null; archive_options = opt record { trigger_threshold = 2000: nat64; num_blocks_to_archive = 1000: nat64; node_max_memory_size_bytes = null; max_message_size_bytes = null; controller_id = principal \"$TEST_RUNNER_CANISTER_ID\" }; send_whitelist = vec {};standard_whitelist = vec {};transfer_fee = null; token_symbol = null; token_name = null;admin = principal \"$TEST_RUNNER_CANISTER_ID\"})" + + +dfx canister call test_runner test + diff --git a/src/origyn_nft_reference/DIP721.mo b/src/origyn_nft_reference/DIP721.mo new file mode 100644 index 0000000..ae7d6ea --- /dev/null +++ b/src/origyn_nft_reference/DIP721.mo @@ -0,0 +1,145 @@ +module { + + //this file contains types needed to provide responses to DIP721 style NFT commands + + public type GenericValue = { + #Nat64Content : Nat64; + #Nat32Content : Nat32; + #BoolContent : Bool; + #Nat8Content : Nat8; + #Int64Content : Int64; + #IntContent : Int; + #NatContent : Nat; + #Nat16Content : Nat16; + #Int32Content : Int32; + #Int8Content : Int8; + #FloatContent : Float; + #Int16Content : Int16; + #BlobContent : [Nat8]; + #NestedContent : Vec; + #Principal : Principal; + #TextContent : Text; + }; + public type InitArgs = { + logo : ?Text; + name : ?Text; + custodians : ?[Principal]; + symbol : ?Text; + }; + public type Metadata = { + logo : ?Text; + name : ?Text; + created_at : Nat64; + upgraded_at : Nat64; + custodians : [Principal]; + symbol : ?Text; + }; + public type Metadata_1 = { #Ok : [Nat]; #Err : NftError }; + public type Metadata_2 = { #Ok : [TokenMetadata]; #Err : NftError }; + public type Metadata_3 = { #Ok : TokenMetadata; #Err : NftError }; + public type Metadata_4 = { #Ok : TxEvent; #Err : NftError }; + public type NftError = { + #UnauthorizedOperator; + #SelfTransfer; + #TokenNotFound; + #UnauthorizedOwner; + #TxNotFound; + #SelfApprove; + #OperatorNotFound; + #ExistedNFT; + #OwnerNotFound; + #Other : Text; + }; + public type Result = { #Ok : Nat; #Err : NftError }; + public type Result_1 = { #Ok : Bool; #Err : NftError }; + public type OwnerOfResponse = { #Ok : ?Principal; #Err : NftError }; + public type Stats = { + cycles : Nat; + total_transactions : Nat; + total_unique_holders : Nat; + total_supply : Nat; + }; + public type SupportedInterface = { + #Burn; + #Mint; + #Approval; + #TransactionHistory; + }; + public type TokenMetadata = { + transferred_at : ?Nat64; + transferred_by : ?Principal; + owner : ?Principal; + operator : ?Principal; + approved_at : ?Nat64; + approved_by : ?Principal; + properties : [(Text, GenericValue)]; + is_burned : Bool; + token_identifier : Nat; + burned_at : ?Nat64; + burned_by : ?Principal; + minted_at : Nat64; + minted_by : Principal; + }; + public type TxEvent = { + time : Nat64; + operation : Text; + details : [(Text, GenericValue)]; + caller : Principal; + }; + public type Vec = [ + ( + Text, + { + #Nat64Content : Nat64; + #Nat32Content : Nat32; + #BoolContent : Bool; + #Nat8Content : Nat8; + #Int64Content : Int64; + #IntContent : Int; + #NatContent : Nat; + #Nat16Content : Nat16; + #Int32Content : Int32; + #Int8Content : Int8; + #FloatContent : Float; + #Int16Content : Int16; + #BlobContent : [Nat8]; + #NestedContent : Vec; + #Principal : Principal; + #TextContent : Text; + }, + 
) + ]; + public type Self = ?InitArgs -> async actor { + approve : shared (Principal, Nat) -> async Result; + balanceOf : shared query Principal -> async Result; + burn : shared Nat -> async Result; + custodians : shared query () -> async [Principal]; + cycles : shared query () -> async Nat; + isApprovedForAll : shared query (Principal, Principal) -> async Result_1; + logo : shared query () -> async ?Text; + metadata : shared query () -> async Metadata; + mint : shared (Principal, Nat, [(Text, GenericValue)]) -> async Result; + name : shared query () -> async ?Text; + operatorOf : shared query Nat -> async OwnerOfResponse; + operatorTokenIdentifiers : shared query Principal -> async Metadata_1; + operatorTokenMetadata : shared query Principal -> async Metadata_2; + ownerOf : shared query Nat -> async OwnerOfResponse; + ownerTokenIdentifiers : shared query Principal -> async Metadata_1; + ownerTokenMetadata : shared query Principal -> async Metadata_2; + setApprovalForAll : shared (Principal, Bool) -> async Result; + setCustodians : shared [Principal] -> async (); + setLogo : shared Text -> async (); + setName : shared Text -> async (); + setSymbol : shared Text -> async (); + stats : shared query () -> async Stats; + supportedInterfaces : shared query () -> async [SupportedInterface]; + symbol : shared query () -> async ?Text; + tokenMetadata : shared query Nat -> async Metadata_3; + totalSupply : shared query () -> async Nat; + totalTransactions : shared query () -> async Nat; + totalUniqueHolders : shared query () -> async Nat; + transaction : shared query Nat -> async Metadata_4; + transfer : shared (Principal, Nat) -> async Result; + transferFrom : shared (Principal, Principal, Nat) -> async Result; + } +} \ No newline at end of file diff --git a/src/origyn_nft_reference/data.mo b/src/origyn_nft_reference/data.mo new file mode 100644 index 0000000..d9046ac --- /dev/null +++ b/src/origyn_nft_reference/data.mo @@ -0,0 +1,213 @@ +import Buffer "mo:base/Buffer"; +import Conversions "mo:candy_0_1_10/conversion"; +import D "mo:base/Debug"; +import Metadata "metadata"; +import MigrationTypes "./migrations/types"; +import Option "mo:base/Option"; +import Properties "mo:candy_0_1_10/properties"; +import Result "mo:base/Result"; +import Types "types"; + +module { + + let Map = MigrationTypes.Current.Map; + let CandyTypes = MigrationTypes.Current.CandyTypes; + + let debug_channel = { + function_announce = false; + data_access = false; + }; + + //gets a text attribute out of a class + private func _get_text_attribute_from_class(this_item: CandyTypes.CandyValue, name : Text) : ?Text { + return switch(Properties.getClassProperty(this_item, name)){ + + case(null){ + return null; + }; + case(?val){ + return ?Conversions.propertyToText(val); + }; + } + }; + + //ORIGYN NFTs have a simple database inside of them. Apps can store data in a + //reserved space that can have flexible permissions. The apps can make it so + //that only they can read the data and/or only they can write the data. They + //can also grant write permissions to certain other principals via an allow list. + //Currnelty the implementation is more like a structured notepad where you have to + //write out the enter note each time. Future versions will add granular access to + //data per app. 
+ public func update_app_nft_origyn(request: Types.NFTUpdateRequest, state: Types.State, caller: Principal): Result.Result<Bool, Types.OrigynError>{
+
+ let (token_id, app_id) = switch(request){
+ case(#replace(details)){
+ //D.print(debug_show(details.data));
+ //(details.token_id, Option.getMapped(Properties.getClassProperty(details.data, "app_id"), propertyToText, return #err(Types.errors(#app_id_not_found, "update_app_nft_origyn - cannot find app id ", ? caller)) ))};
+ let app_id = switch(_get_text_attribute_from_class(details.data, Types.metadata.__apps_app_id)){
+ case(null){
+ return #err(Types.errors(#token_not_found, "update_app_nft_origyn - cannot find app_id", ? caller));
+ };
+ case(?val){val};
+ };
+ (details.token_id, app_id)};
+ case(#update(details)){(details.token_id, details.app_id)};
+ };
+
+ debug if(debug_channel.data_access) D.print("found token and app " # token_id # " " # app_id);
+
+ var found_metadata : CandyTypes.CandyValue = #Empty;
+
+ //try to find existing metadata
+ switch(Map.get(state.state.nft_metadata, Map.thash, token_id)){
+ case(null){
+
+ return #err(Types.errors(#token_not_found, "update_app_nft_origyn - cannot find token", ? caller));
+
+ };
+
+ case(?this_metadata){
+ //exists
+ debug if(debug_channel.data_access) D.print("exists");
+
+ //find the app
+ switch(Properties.getClassProperty(this_metadata, Types.metadata.__apps)){
+ case(null){return #err(Types.errors(#content_not_found, "update_app_nft_origyn - __apps node not found", ? caller));};
+ case(?found){
+ debug if(debug_channel.data_access) D.print("found apps");
+ let found_array = Conversions.valueToValueArray(found.value);
+ let new_list = Buffer.Buffer<CandyTypes.CandyValue>(found_array.size());
+
+ //this is currently a very inefficient way of doing this. Once candy adds dictionaries we should switch to that
+ //currently we are rewriting the entire __apps section each time.
+ for(this_item in found_array.vals()){
+ if(?app_id == _get_text_attribute_from_class(this_item, Types.metadata.__apps_app_id)){
+ debug if(debug_channel.data_access) D.print("got the app");
+ switch(request){
+ case(#replace(detail)){
+ debug if(debug_channel.data_access) D.print("this is replace");
+ //we check to see if we have write rights
+ switch(Properties.getClassProperty(this_item, "write")){
+ //nyi: create user story and test for missing read/write
+
+ case(null){return #err(Types.errors(#content_not_found, "update_app_nft_origyn - write node not found", ? caller))};
+ case(?write_node){
+ debug if(debug_channel.data_access) D.print("have the write node");
+ switch(write_node.value){
+ case(#Text(write_detail)){
+ if(write_detail == "public"){
+ //nyi: anyone can write. Maybe an error?
+ return #err(Types.errors(#improper_interface, "update_app_nft_origyn - write node cannot be public - this isn't a bathroom stall", ? caller));
+ } else if (write_detail == "nft_owner") {
+ if(Metadata.is_owner(this_metadata, #principal(caller))){}
+ else{
+ return #err(Types.errors(#unauthorized_access, "update_app_nft_origyn - write is nft_owner - must own this NFT", ? caller));
+ }
+ } else if (write_detail == "collection_owner") {
+ if(state.state.collection_data.owner == caller){}
+ else{
+ return #err(Types.errors(#unauthorized_access, "update_app_nft_origyn - write is collection_owner - must own the collection", ? caller));
+ }
+ } else {
+ return #err(Types.errors(#nyi, "update_app_nft_origyn - write node malformed", ?
caller)); + }; + + new_list.add(detail.data); + }; + case(#Class(write_detail)){ + //D.print("have write detail"); + switch(Properties.getClassProperty(write_node.value, "type")){ + case(?write_type){ + //D.print("have write type"); + switch(write_type.value){ + case(#Text(write_type_detail)){ + //D.print("have write type detial"); + if(write_type_detail == "allow"){ + switch(Properties.getClassProperty(write_node.value,"list")){ + case(?allow_list){ + //D.print("have allow llist"); + //D.print(debug_show(Conversion.valueToValueArray(allow_list.value))); + var b_found = false; + label search for(this_principal in Conversions.valueToValueArray(allow_list.value).vals()){ + //D.print(Principal.toText( caller)); + if( caller == Conversions.valueToPrincipal(this_principal)){ + //we are allowed + //D.print("found a match"); + b_found := true; + break search; + + }; + }; + if(b_found == false){ + return #err(Types.errors(#unauthorized_access, "update_app_nft_origyn - not in allow list", ? caller)); + + } else { + //D.print("adding new data"); + //do the replace + new_list.add(detail.data); + }; + //D.print("made it past list"); + + }; + case(null){ + return #err(Types.errors(#unauthorized_access, "update_app_nft_origyn - empty allow list", ? caller)); + + } + }; + } else {//nyi: implement block list; roles based security + return #err(Types.errors(#nyi, "update_app_nft_origyn - only allow list and public implemented", ? caller)); + + }; + }; + + case(_){ + return #err(Types.errors(#nyi, "update_app_nft_origyn - not in proper type of write type", ? caller)); + + }; + }; + }; + case(_){ + return #err(Types.errors(#nyi, "update_app_nft_origyn - type is null for write type", ? caller)); + + }; + }; + }; + case(_){ + return #err(Types.errors(#nyi, "update_app_nft_origyn - not a class", ? caller)); + + }; + }; + }; + }; + + }; + case(#update(detail)){ + return #err(Types.errors(#nyi, "update_app_nft_origyn - update not implemented", ? 
caller)); + + }; + + }; + } else { + //D.print("not the app"); + //D.print(app_id); + new_list.add(this_item); + }; + }; + found_metadata := #Class(switch(Properties.updateProperties(Conversions.valueToProperties(this_metadata), [{name = Types.metadata.__apps; mode=#Set(#Array(#thawed(new_list.toArray())))}])){ + case(#err(errType)){ + return #err(Types.errors(#update_class_error, "update_app_nft_origyn - set metadata status", ?caller)); + }; + case(#ok(result)){ + result; + } + }); + }; + }; + + //swap metadata + let insert_result = Map.set(state.state.nft_metadata, Map.thash, token_id, found_metadata); + return #ok(true); + }; + }; + }; +} \ No newline at end of file diff --git a/src/origyn_nft_reference/dfxtypes.mo b/src/origyn_nft_reference/dfxtypes.mo new file mode 100644 index 0000000..b7cfad4 --- /dev/null +++ b/src/origyn_nft_reference/dfxtypes.mo @@ -0,0 +1,217 @@ +module { + + //this file contains types needed to interact with an ICP/OGY style ledger + + public type AccountBalanceArgs = { account : AccountIdentifier }; + public type AccountBalanceArgsDFX = { account : AccountIdentifierDFX }; + public type AccountIdentifier = Blob; + public type AccountIdentifierDFX = Text; + public type Archive = { canister_id : Principal }; + public type ArchiveOptions = { + num_blocks_to_archive : Nat64; + trigger_threshold : Nat64; + max_message_size_bytes : ?Nat64; + cycles_for_archive_creation : ?Nat64; + node_max_memory_size_bytes : ?Nat64; + controller_id : Principal; + }; + public type Archives = { archives : [Archive] }; + public type Block = { + transaction : Transaction; + timestamp : TimeStamp; + parent_hash : ?Blob; + }; + public type BlockArg = BlockHeight; + public type BlockDFX = { + transaction : TransactionDFX; + timestamp : TimeStamp; + parent_hash : ?[Nat8]; + }; + public type BlockHeight = Nat64; + public type BlockIndex = Nat64; + public type BlockRange = { blocks : [Block] }; + public type BlockRes = ?{ + #Ok : ?{ #Ok : Block; #Err : CanisterId }; + #Err : Text; + }; + public type CanisterId = Principal; + public type Duration = { secs : Nat64; nanos : Nat32 }; + public type GetBlocksArgs = { start : BlockIndex; length : Nat64 }; + public type Hash = ?{ inner : [Nat8] }; + public type HeaderField = (Text, Text); + public type HttpRequest = { + url : Text; + method : Text; + body : [Nat8]; + headers : [HeaderField]; + }; + public type HttpResponse = { + body : [Nat8]; + headers : [HeaderField]; + status_code : Nat16; + }; + public type LedgerCanisterInitPayload = { + send_whitelist : [Principal]; + admin : Principal; + token_symbol : ?Text; + transfer_fee : ?Tokens; + minting_account : AccountIdentifierDFX; + transaction_window : ?Duration; + max_message_size_bytes : ?Nat64; + archive_options : ?ArchiveOptions; + standard_whitelist : [Principal]; + initial_values : [(AccountIdentifierDFX, Tokens)]; + token_name : ?Text; + }; + public type Memo = Nat64; + public type NotifyCanisterArgs = { + to_subaccount : ?SubAccount; + from_subaccount : ?SubAccount; + to_canister : Principal; + max_fee : Tokens; + block_height : BlockHeight; + }; + public type Operation = { + #Burn : { from : AccountIdentifier; amount : Tokens }; + #Mint : { to : AccountIdentifier; amount : Tokens }; + #Transfer : { + to : AccountIdentifier; + fee : Tokens; + from : AccountIdentifier; + amount : Tokens; + }; + }; + public type OperationDFX = { + #Burn : { from : AccountIdentifierDFX; amount : Tokens }; + #Mint : { to : AccountIdentifierDFX; amount : Tokens }; + #Send : { + to : AccountIdentifierDFX; + from 
: AccountIdentifierDFX; + amount : Tokens; + }; + }; + public type QueryArchiveError = { + #BadFirstBlockIndex : { + requested_index : BlockIndex; + first_valid_index : BlockIndex; + }; + #Other : { error_message : Text; error_code : Nat64 }; + }; + public type QueryArchiveFn = shared query GetBlocksArgs -> async QueryArchiveResult; + public type QueryArchiveResult = { + #Ok : BlockRange; + #Err : QueryArchiveError; + }; + public type QueryBlocksResponse = { + certificate : ?[Nat8]; + blocks : [Block]; + chain_length : Nat64; + first_block_index : BlockIndex; + archived_blocks : [ + { callback : QueryArchiveFn; start : BlockIndex; length : Nat64 } + ]; + }; + public type SendArgs = { + to : AccountIdentifierDFX; + fee : Tokens; + memo : Memo; + from_subaccount : ?SubAccount; + created_at_time : ?TimeStamp; + amount : Tokens; + }; + public type SubAccount = [Nat8]; + public type TimeStamp = { timestamp_nanos : Nat64 }; + public type TipOfChainRes = { + certification : ?[Nat8]; + tip_index : BlockHeight; + }; + public type Tokens = { e8s : Nat64 }; + public type Transaction = { + memo : Memo; + operation : ?Operation; + created_at_time : TimeStamp; + }; + public type TransactionDFX = { + memo : Memo; + operation : ?OperationDFX; + created_at_time : TimeStamp; + }; + public type TransferArgs = { + to : AccountIdentifier; + fee : Tokens; + memo : Memo; + from_subaccount : ?SubAccount; + created_at_time : ?TimeStamp; + amount : Tokens; + }; + public type TransferError = { + #TxTooOld : { allowed_window_nanos : Nat64 }; + #BadFee : { expected_fee : Tokens }; + #TxDuplicate : { duplicate_of : BlockIndex }; + #TxCreatedInFuture; + #InsufficientFunds : { balance : Tokens }; + }; + public type TransferFee = { transfer_fee : Tokens }; + public type TransferFeeArg = {}; + public type TransferResult = { #Ok : BlockIndex; #Err : TransferError }; + public type TransferStandardArgs = { + to : AccountIdentifier; + fee : Tokens; + memo : Memo; + from_subaccount : ?SubAccount; + from_principal : Principal; + created_at_time : ?TimeStamp; + amount : Tokens; + }; + public type Service = actor { + account_balance : shared query AccountBalanceArgs -> async Tokens; + account_balance_dfx : shared query AccountBalanceArgsDFX -> async Tokens; + archives : shared query () -> async Archives; + block_dfx : shared query BlockArg -> async BlockRes; + decimals : shared query () -> async { decimals : Nat32 }; + get_admin : shared query {} -> async Principal; + get_minting_account_id_dfx : shared query {} -> async ?AccountIdentifier; + get_nodes : shared query () -> async [CanisterId]; + get_send_whitelist_dfx : shared query {} -> async [Principal]; + http_request : shared query HttpRequest -> async HttpResponse; + name : shared query () -> async { name : Text }; + notify_dfx : shared NotifyCanisterArgs -> async (); + query_blocks : shared query GetBlocksArgs -> async QueryBlocksResponse; + send_dfx : shared SendArgs -> async BlockHeight; + set_admin : shared Principal -> async (); + set_minting_account_id_dfx : shared AccountIdentifier -> async (); + set_send_whitelist_dfx : shared [Principal] -> async (); + set_standard_whitelist_dfx : shared [Principal] -> async (); + symbol : shared query () -> async { symbol : Text }; + tip_of_chain_dfx : shared query {} -> async TipOfChainRes; + total_supply_dfx : shared query {} -> async Tokens; + transfer : shared TransferArgs -> async TransferResult; + transfer_fee : shared query TransferFeeArg -> async TransferFee; + transfer_standard_stdldg : shared TransferStandardArgs -> async 
+    TransferResult;
+  };
+
+  type GetBlocksResult = {
+    #Ok : BlockRange;
+    #Err : GetBlocksError;
+  };
+
+  public type GetBlocksError = {
+
+    /// The [GetBlocksArgs.start] is below the first block that
+    /// archive node stores.
+    #BadFirstBlockIndex : {
+      requested_index : BlockIndex;
+      first_valid_index : BlockIndex;
+    };
+
+    /// Reserved for future use.
+    #Other : {
+      error_code : Nat64;
+      error_message : Text;
+    };
+  };
+
+  public type ArchiveService = actor {
+    get_blocks : shared query(GetBlocksArgs) -> async (GetBlocksResult);
+  }
+}
\ No newline at end of file
diff --git a/src/origyn_nft_reference/governance.mo b/src/origyn_nft_reference/governance.mo
new file mode 100644
index 0000000..1958ea8
--- /dev/null
+++ b/src/origyn_nft_reference/governance.mo
@@ -0,0 +1,52 @@
+import Buffer "mo:base/Buffer";
+import CandyTypes "mo:candy_0_1_10/types";
+import Conversions "mo:candy_0_1_10/conversion";
+import D "mo:base/Debug";
+import Metadata "metadata";
+import MigrationTypes "./migrations/types";
+import Option "mo:base/Option";
+import Properties "mo:candy_0_1_10/properties";
+import Result "mo:base/Result";
+import Types "types";
+
+module {
+
+  let Map = MigrationTypes.Current.Map;
+  let CandyTypes = MigrationTypes.Current.CandyTypes;
+
+  let debug_channel = {
+    function_announce = false;
+    governance = false;
+  };
+
+  public func governance_nft_origyn(state: Types.State, request : Types.GovernanceRequest, caller : Principal) : Result.Result {
+
+    if(state.state.collection_data.network != ?caller){
+      return #err(Types.errors(#unauthorized_access, "governance_nft_origyn - unauthorized access - only network can govern", ?caller))
+    };
+
+    switch(request){
+      case(#clear_shared_wallets(token_id)){
+        var metadata = switch(Metadata.get_metadata_for_token(state, token_id, caller, ?state.canister(), state.state.collection_data.owner)){
+          case(#err(err)){
+            return #err(Types.errors(#token_not_found, "share_nft_origyn token not found" # err.flag_point, ?caller));
+          };
+          case(#ok(val)){
+            val;
+          };
+        };
+
+        metadata := Metadata.set_system_var(metadata, Types.metadata.__system_wallet_shares, #Empty);
+        Map.set(state.state.nft_metadata, Map.thash, token_id, metadata);
+        return #ok(#clear_shared_wallets(true));
+
+      };
+      case(_){
+        return #err(Types.errors(#nyi, "governance_nft_origyn - not yet implemented" # debug_show(request), ?caller))
+
+      };
+    };
+  };
+
+
+};
\ No newline at end of file
diff --git a/src/origyn_nft_reference/http.mo b/src/origyn_nft_reference/http.mo
new file mode 100644
index 0000000..4086184
--- /dev/null
+++ b/src/origyn_nft_reference/http.mo
@@ -0,0 +1,1519 @@
+import http "mo:http/Http";
+import httpparser "mo:httpparser/lib";
+import CandyTypes "mo:candy_0_1_10/types";
+import Conversion "mo:candy_0_1_10/conversion";
+import Properties "mo:candy_0_1_10/properties";
+import Principal "mo:base/Principal";
+import Time "mo:base/Time";
+import D "mo:base/Debug";
+import Nat "mo:base/Nat";
+import Array "mo:base/Array";
+import Blob "mo:base/Blob";
+import Buffer "mo:base/Buffer";
+import Char "mo:base/Char";
+import Iter "mo:base/Iter";
+import List "mo:base/List";
+import Option "mo:base/Option";
+import Result "mo:base/Result";
+import Text "mo:base/Text";
+import TrieMap "mo:base/TrieMap";
+import Random "mo:base/Random";
+import Types "types";
+import NFTUtils "utils";
+import Metadata "metadata";
+import Map "mo:map_6_0_0/Map";
+
+module {
+
+  let debug_channel = {
+    streaming = false;
+    large_content = false;
+    library = false;
+    request = false;
+  };
+
+  //the max
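+  /* Illustrative sketch: governance_nft_origyn (governance.mo above) only honours the
+     collection's network principal and currently implements #clear_shared_wallets, which
+     resets the __system_wallet_shares system var to #Empty. Calling it from the network,
+     assuming the gateway actor re-exports the function under the same name and that the
+     Result parameters are Types.GovernanceResponse / Types.OrigynError (the concrete
+     generics are not visible in this rendering).
+
+     import Result "mo:base/Result";
+     import Types "types";
+
+     actor ExampleNetwork {
+       public func clear_shares(gateway_id : Text, token_id : Text) : async Bool {
+         let gateway = actor(gateway_id) : actor {
+           governance_nft_origyn : shared (Types.GovernanceRequest) ->
+             async Result.Result<Types.GovernanceResponse, Types.OrigynError>;
+         };
+         switch (await gateway.governance_nft_origyn(#clear_shared_wallets(token_id))) {
+           case (#ok(_)) { true };    // wallet shares wiped for token_id
+           case (#err(_)) { false };  // e.g. #unauthorized_access for non-network callers
+         };
+       };
+     };
+  */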
size of a streaming chunk + private let __MAX_STREAM_CHUNK = 2048000; + + public type HTTPResponse = { + body : Blob; + headers : [http.HeaderField]; + status_code : Nat16; + streaming_strategy : ?StreamingStrategy; + }; + + public type StreamingStrategy = { + #Callback: { + callback : shared () -> async (); + token : StreamingCallbackToken; + }; + }; + + public type StreamingCallbackToken = { + content_encoding : Text; + index : Nat; + key : Text; + }; + + public type StreamingCallbackResponse = { + body : Blob; + token : ?StreamingCallbackToken; + }; + + public type HeaderField = (Text, Text); + + public type HttpRequest = { + body: Blob; + headers: [HeaderField]; + method: Text; + url: Text; + }; + + // generates a random access key for use with procuring owner's assets + public func gen_access_key(): async Text { + let entropy = await Random.blob(); // get initial entropy + var rand = Text.replace(debug_show(entropy), #text("\\"), ""); + Text.replace(rand, #text("\""), ""); + }; + + //handels stream content with chunk requests + public func handle_stream_content( + state : Types.State, + token_id : Text, + library_id : Text, + start : ?Nat, + end : ?Nat, + contentType : Text, + data : CandyTypes.DataZone, + req : httpparser.ParsedHttpRequest + ) : HTTPResponse { + + + let canister_id: Text = Principal.toText(state.canister()); + let canister = actor (canister_id) : actor { nftStreamingCallback : shared () -> async () }; + + + debug if(debug_channel.streaming) D.print("Handling an range streaming NFT" # debug_show(token_id)); + var size : Nat = 0; + //find the right data zone + for(this_item in data.vals()){ + switch(this_item){ + case(#Bytes(bytes)){ + switch(bytes){ + case(#thawed(aArray)){ + size := size + aArray.size(); + }; + case(#frozen(aArray)){ + size := size + aArray.size(); + }; + }; + }; + case(#Blob(bytes)){ + + size := size + bytes.size(); + + }; + case(_){}; + }; + + }; + + var rEnd = switch(end){ + case(null){size-1 : Nat;}; + case(?v){v}; + }; + + let rStart = switch(start){ + case(null){0;}; + case(?v){v}; + }; + + debug if(debug_channel.streaming)D.print( Nat.toText(rStart) # " - " # Nat.toText(rEnd) # " / " #Nat.toText(size)); + + if(rEnd - rStart : Nat > __MAX_STREAM_CHUNK){ + rEnd := rStart + __MAX_STREAM_CHUNK - 1; + }; + + if(rEnd - rStart : Nat > __MAX_STREAM_CHUNK){ + debug if(debug_channel.streaming) D.print("handling big branch"); + + let cbt = _stream_media(token_id, library_id, rStart, data, rStart, rEnd, size); + + debug if(debug_channel.streaming)D.print("The cbt: " # debug_show(cbt.callback)); + { + //need to use streaming strategy + status_code = 206; + headers = [ + ("Content-Type", contentType), + ("Accept-Ranges", "bytes"), + //("Content-Range", "bytes 0-1/" # Nat.toText(size)), + ("Content-Range", "bytes " # Nat.toText(rStart) # "-" # Nat.toText(rEnd) # "/" # Nat.toText(size)), + //("Content-Range", "bytes 0-"# Nat.toText(size-1) # "/" # Nat.toText(size)), + ("Content-Length", Nat.toText(cbt.payload.size())), + ("Cache-Control","private"), + ]; + body = cbt.payload; + streaming_strategy = switch (cbt.callback) { + case (null) { null; }; + case (? 
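+  /* Illustrative sketch: gen_access_key above turns one round of IC randomness into a
+     URL-safe text token by stripping backslashes and quotes from debug_show of the blob.
+     One way such a key could be minted and stored for http_owner_check further down; the
+     minting endpoint and the expiry value are assumptions (only the checking side lives in
+     this file), while the {identity; expires} shape matches Types.HttpAccess as used below.
+
+     import Random "mo:base/Random";
+     import Text "mo:base/Text";
+     import Time "mo:base/Time";
+     import TrieMap "mo:base/TrieMap";
+
+     actor ExampleAccessKeys {
+       type HttpAccess = { identity : Principal; expires : Int };
+       let access_tokens = TrieMap.TrieMap<Text, HttpAccess>(Text.equal, Text.hash);
+
+       public shared(msg) func get_access_key() : async Text {
+         let entropy = await Random.blob();
+         var key = Text.replace(debug_show(entropy), #text("\\"), "");
+         key := Text.replace(key, #text("\""), "");
+         access_tokens.put(key, {
+           identity = msg.caller;
+           expires = Time.now() + 360_000_000_000;   // ~6 minutes, in nanoseconds
+         });
+         key;
+       };
+     };
+
+     The browser then appends the key to owner-gated URLs:
+       https://<canister>.raw.ic0.app/-/<token_id>/-/<library_id>?access=<key>
+  */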
tk) { + ?#Callback({ + token = tk; + callback = canister.nftStreamingCallback; + }); + }; + }; + }; + } else { + //just one chunk + debug if(debug_channel.streaming) D.print("returning short array"); + + let cbt = _stream_media(token_id, library_id, rStart, data, rStart, rEnd, size); + + debug if(debug_channel.streaming) D.print("the size " # Nat.toText(cbt.payload.size())); + return { + status_code = 206; + headers = [ + ("Content-Type", contentType), + ("Accept-Ranges", "bytes"), + ("Content-Range", "bytes " # Nat.toText(rStart) # "-" # Nat.toText(rEnd) # "/" # Nat.toText(size)), + //("Content-Range", "bytes 0-"# Nat.toText(size-1) # "/" # Nat.toText(size)), + ("Content-Length", Nat.toText(cbt.payload.size())), + ("Cache-Control","private") + ]; + body = cbt.payload; + streaming_strategy = null; + }; + }; + + + }; + + //handles non-streaming large content + public func handleLargeContent( + state : Types.State, + key : Text, + contentType : Text, + data : CandyTypes.DataZone, + req : httpparser.ParsedHttpRequest + ) : HTTPResponse { + let result = _stream_content(key, 0, data); + + debug if(debug_channel.large_content)D.print("handling large content " # debug_show(result.callback)); + + let canister_id: Text = Principal.toText(state.canister()); + let canister = actor (canister_id) : actor { nftStreamingCallback : shared () -> async () }; + + var b_foundRange : Bool = false; + var start_range : Nat = 0; + var end_range : Nat = 0; + + //nyi: should the data zone cache this? + { + status_code = 200; + headers = [ + ("Content-Type", contentType), + ("accept-ranges", "bytes"), + ("Cache-Control","private"), + ]; + body = result.payload; + streaming_strategy = switch (result.callback) { + case (null) { null; }; + case (? tk) { + ?#Callback({ + token = tk; + callback = canister.nftStreamingCallback; + }); + }; + }; + }; + + }; + + public func _stream_media( + token_id : Text, + library_id :Text, + index : Nat, + data : CandyTypes.DataZone, + rStart : Nat, + rEnd : Nat, + size : Nat, + + ) : { + payload: Blob; // Payload based on the index. + callback: ?StreamingCallbackToken // Callback for next chunk (if applicable). 
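+  /* Illustrative sketch: handle_stream_content above defaults a missing range start to 0
+     and a missing end to size - 1, then clamps the served window to __MAX_STREAM_CHUNK
+     bytes before building the Content-Range header. The window arithmetic in isolation
+     (module and function names are illustrative; size must be > 0 and start <= end, as the
+     code above also assumes).
+
+     import Nat "mo:base/Nat";
+
+     module RangeWindow {
+       let MAX_STREAM_CHUNK = 2048000;   // mirrors __MAX_STREAM_CHUNK
+
+       public func window(start : ?Nat, end : ?Nat, size : Nat) : (Nat, Nat) {
+         let first = switch (start) { case (null) { 0 }; case (?v) { v } };
+         var last = switch (end) { case (null) { Nat.sub(size, 1) }; case (?v) { v } };
+         if (Nat.sub(last, first) > MAX_STREAM_CHUNK) {
+           last := first + MAX_STREAM_CHUNK - 1;   // never serve more than one chunk at once
+         };
+         (first, last);
+       };
+     };
+
+     window(null, null, 5_000_000) == (0, 2_047_999); the rest of the bytes are reached
+     through further Range requests or the streaming token described below.
+  */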
+ } { + + debug if(debug_channel.streaming) D.print("in _stream_media"); + debug if(debug_channel.streaming)D.print("token_id " # debug_show(token_id)); + debug if(debug_channel.streaming)D.print("library_id " # debug_show(library_id)); + debug if(debug_channel.streaming)D.print("index " # debug_show(index)); + debug if(debug_channel.streaming)D.print(debug_show(rEnd) # " " # debug_show(rStart) # " "); + + var tracker : Nat = 0; + let buf_size = if(Nat.sub(rEnd,index) >= __MAX_STREAM_CHUNK){ + __MAX_STREAM_CHUNK; + } else { + rEnd - index + 1 : Nat; + }; + + + debug if(debug_channel.streaming)D.print("buffer of size " # debug_show(buf_size)); + let payload : Buffer.Buffer = Buffer.Buffer(buf_size); + var blob_payload = Blob.fromArray([]); + + label getData for(this_item in data.vals()){ + + debug if(debug_channel.streaming) D.print("zone processing" # debug_show(tracker) # "nft-m/" # token_id # "|" # library_id # "|" # Nat.toText(rStart) # "|" # Nat.toText(rEnd) # "|" # Nat.toText(size)); + let chunk = Conversion.valueUnstableToBlob(this_item); + + let chunkSize = chunk.size(); + if(chunkSize + tracker < index){ + debug if(debug_channel.streaming) D.print("skipping chunk"); + tracker += chunkSize; + continue getData; + }; + + debug if(debug_channel.streaming) D.print("current " # debug_show((rStart, rEnd, tracker, chunk.size()))); + + if( + (tracker == rStart) and (tracker + chunk.size() == rEnd + 1) + ){ + debug if(debug_channel.streaming)D.print("matched rstart and rend on whole chunk"); + blob_payload := chunk; + break getData; + }; + + debug if(debug_channel.streaming)D.print("got past the chunk check" # "nft-m/" # token_id # "|" # library_id # "|" # Nat.toText(rStart) # "|" # Nat.toText(rEnd) # "|" # Nat.toText(size)); + debug if(debug_channel.streaming) D.print(debug_show(chunk.size())); + for(this_byte in chunk.vals()){ + debug if(tracker % 1000000 == 0){ + debug if(debug_channel.streaming) D.print(debug_show(tracker % 10000000) # " " # debug_show(tracker) # " " # debug_show(index) # " " # "nft-m/" # token_id # "|" # library_id # "|" # Nat.toText(rStart) # "|" # Nat.toText(rEnd) # "|" # Nat.toText(size)); + }; + if(tracker >= index){ + payload.add(this_byte); + }; + tracker += 1; + if(tracker > rEnd or tracker > Nat.sub(index + __MAX_STREAM_CHUNK, 1)){ + //D.print("broke tracker at " # debug_show(tracker) # " nft-m/" # token_id # "|" # library_id # "|" # Nat.toText(rStart) # "|" # Nat.toText(rEnd) # "|" # Nat.toText(size)); + break getData; + } + }; + }; + //D.print("should have the buffer" # debug_show(payload.size())); + //D.print("tracker: " # Nat.toText(tracker)); + + if(blob_payload.size() == 0){ + blob_payload := Blob.fromArray(payload.toArray()); + }; + + let token = if(tracker >= size or tracker >= rEnd){ + debug if(debug_channel.streaming) D.print("found the end, returning null" # "nft-m/" # token_id # "|" # library_id # "|" # Nat.toText(rStart) # "|" # Nat.toText(rEnd) # "|" # Nat.toText(size)); + null; + } else { + debug if(debug_channel.streaming) D.print("_streaming returning the key " # "nft-m/" # token_id # "|" # library_id # "|" # Nat.toText(rStart) # "|" # Nat.toText(rEnd) # "|" # Nat.toText(size)); + ?{ + content_encoding = "gzip"; + index = tracker; + key = "nft-m/" # token_id # "|" # library_id # "|" # Nat.toText(rStart) # "|" # Nat.toText(rEnd) # "|" # Nat.toText(size); + //key = "nft-m/" # token_id # "|" # library_id # "|" # Nat.toText(tracker) # "|" # Nat.toText(rEnd) # "|" # Nat.toText(size); + } + }; + + {payload = blob_payload; callback =token}; + }; + + 
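+  /* Illustrative sketch: _stream_media above packs everything the follow-up callback needs
+     into the token key, "nft-m/<token_id>|<library_id>|<rStart>|<rEnd>|<size>", while
+     token.index records how far into the data zone the previous chunk reached. Building and
+     unpacking that key with the same Text.tokens calls nftStreamingCallback uses further
+     down (module name illustrative). Note that for collection-level media token_id is the
+     empty string, and Text.tokens then drops the empty field, leaving four parts.
+
+     import Iter "mo:base/Iter";
+     import Nat "mo:base/Nat";
+     import Text "mo:base/Text";
+
+     module StreamKey {
+       public func build(token_id : Text, library_id : Text, rStart : Nat, rEnd : Nat, size : Nat) : Text {
+         "nft-m/" # token_id # "|" # library_id # "|" #
+           Nat.toText(rStart) # "|" # Nat.toText(rEnd) # "|" # Nat.toText(size);
+       };
+
+       public func parse(key : Text) : ?[Text] {
+         let path = Iter.toArray(Text.tokens(key, #text("/")));
+         if (path.size() != 2 or path[0] != "nft-m") { return null };
+         ?Iter.toArray(Text.tokens(path[1], #text("|")));
+       };
+     };
+
+     parse(build("bm-1", "hi-res", 0, 2_047_999, 5_000_000))
+       == ?["bm-1", "hi-res", "0", "2047999", "5000000"]
+  */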
public func _stream_content( + key : Text, + index : Nat, + data : CandyTypes.DataZone, + ) : { + payload: Blob; // Payload based on the index. + callback: ?StreamingCallbackToken // Callback for next chunk (if applicable). + } { + let payload = data.get(index); + debug if(debug_channel.streaming) D.print("in private call back"); + debug if(debug_channel.streaming)D.print(debug_show(data.size())); + if (index + 1 == data.size()) return {payload = Conversion.valueUnstableToBlob(payload); callback = null}; + debug if(debug_channel.streaming)D.print("returning a new key" # key); + debug if(debug_channel.streaming)D.print(debug_show(key)); + {payload = Conversion.valueUnstableToBlob(payload); + callback = ?{ + content_encoding = "gzip"; + index = index + 1; + key = key; + }}; + }; + + + public func stream_media( + token_id : Text, + library_id : Text, + index : Nat, + data : CandyTypes.DataZone, + rStart : Nat, + rEnd : Nat, + size : Nat + ) : StreamingCallbackResponse { + let result = _stream_media( + token_id, + library_id, + index, + data, + rStart, + rEnd, + size + ); + + debug if(debug_channel.streaming)D.print("the media content"); + debug if(debug_channel.streaming)D.print(debug_show(result)); + {body = result.payload; token = result.callback}; + }; + + //determines how a library item should be rendere in an http request + public func renderLibrary( + state : Types.State, + req : httpparser.ParsedHttpRequest, + metadata : CandyTypes.CandyValue, + token_id: Text, + library_id: Text) : HTTPResponse { + + debug if(debug_channel.library) D.print("in render library)"); + + let library_meta = switch(Metadata.get_library_meta(metadata, library_id)){ + case(#err(err)){return _not_found("meta not found - " # token_id # " " # library_id);}; + case(#ok(val)){val}; + + + }; + + debug if(debug_channel.library) D.print("library meta" #debug_show(library_meta)); + + let location_type = switch(Metadata.get_nft_text_property(library_meta, "location_type")){ + case(#err(err)){return _not_found("location type not found" # token_id # " " # library_id);}; + case(#ok(val)){val}; + }; + + let read_type = switch(Metadata.get_nft_text_property(library_meta, "read")){ + case(#err(err)){return _not_found("read type not found" # token_id # " " # library_id);}; + case(#ok(val)){val}; + }; + + let location = switch(Metadata.get_nft_text_property(library_meta, "location")){ + case(#err(err)){return _not_found("location type not found" # token_id # " " # library_id);}; + case(#ok(val)){val}; + }; + + let use_token_id = if(location_type == "canister"){ + debug if(debug_channel.library) D.print("location type is canister"); + token_id; + } else if(location_type == "collection"){ + debug if(debug_channel.library) D.print("location type is collection"); + ""; + } else if(location_type == "web"){ + return { + body = ""; + headers = [("Location", location)]; + status_code = 307; + streaming_strategy = null; + }; + + }else { + return _not_found("library hosted off chain - " # token_id # " " # library_id # " " # location_type); + }; + + debug if(debug_channel.library) D.print("comparing library in allocation" # debug_show((use_token_id, library_id, state.state.allocations))); + let allocation = switch(Map.get<(Text, Text), Types.AllocationRecord>(state.state.allocations, (NFTUtils.library_hash, NFTUtils.library_equal), (use_token_id, library_id))){ + case(null){ + return _not_found("allocation for token, library not found - " # use_token_id # " " # library_id); + }; + case(?val){val}; + }; + + debug if(debug_channel.library) 
D.print("found allocation" # debug_show((allocation.canister, state.canister()))); + + + if(allocation.canister != state.canister()){ + debug if(debug_channel.library) D.print("item is not on this server redir to " # Principal.toText(allocation.canister)); + let location = switch(Metadata.get_nft_text_property(library_meta, "location")){ + case(#err(err)){return _not_found("location not found" # token_id # " " # library_id);}; + case(#ok(val)){val}; + }; + + debug if(debug_channel.library) D.print("have location" # debug_show(location)); + + let path = if(use_token_id == ""){ + "collection/-/" # library_id + } else { + "-/" # use_token_id # "/-/" # library_id + }; + + debug if(debug_channel.library) D.print("got a path " # path); + + debug if(debug_channel.library) D.print("trying " # debug_show(Metadata.get_primary_host(state, use_token_id, Principal.fromBlob("\04")), Metadata.get_primary_port(state, use_token_id, Principal.fromBlob("\04")), Metadata.get_primary_protocol(state, use_token_id,Principal.fromBlob("\04")))); + + let address = switch( + Metadata.get_primary_host(state, use_token_id, Principal.fromBlob("\04")), + Metadata.get_primary_port(state, use_token_id, Principal.fromBlob("\04")), + Metadata.get_primary_protocol(state, use_token_id, Principal.fromBlob("\04"))){ + case(#ok(host), #ok(port), #ok(protocol)){ + protocol # "://" # host # (if(port=="443" or port == "80"){""}else{":" # port}) # "/" # path # "?canisterId=" # Principal.toText(allocation.canister) + }; + //todo: the below may be broken...you may need to add the defaults + case(_,_,_){ + if(Text.startsWith(location, #text("http")) == true){ + location + } else { + "https://" # Principal.toText(allocation.canister) # ".ic0.app/" # location + }; + + }; + }; + + debug if(debug_channel.library) D.print("got a location " # address); + + debug if(debug_channel.library) D.print("trying " # debug_show(Metadata.get_primary_host(state, use_token_id,Principal.fromBlob("\04")), Metadata.get_primary_port(state,use_token_id, Principal.fromBlob("\04")), Metadata.get_primary_protocol(state, use_token_id, Principal.fromBlob("\04")))); + + + return { + body = ""; + headers = [("Location", address),("icx-proxy-forward","true")]; + status_code = 307; + streaming_strategy = null; + }; + }; + + if(read_type == "owner"){ //own this NFT + switch(http_nft_owner_check(state, req, metadata)) { + case(#err(err)) { + return _not_found(err); + }; + case(#ok()) {}; + }; + }; + + if(read_type == "collection_owner"){ //own the collection + switch(http_owner_check(state, req)) { + case(#err(err)) { + return _not_found(err); + }; + case(#ok()) {}; + }; + }; + + if(location_type == "canister"){ + //on this canister + debug if(debug_channel.library) D.print("canister"); + let content_type = switch(Metadata.get_nft_text_property(library_meta, "content_type")){ + case(#err(err)){return _not_found("content type not found");}; + case(#ok(val)){val}; + }; + + let item = switch(Metadata.get_library_item_from_store(state.nft_library, token_id, library_id)){ + case(#err(err)){return _not_found("item not found")}; + case(#ok(val)){val}; + }; + + switch(item.getOpt(1)){ + case(null){ + //nofiledata + return _not_found("file data not found"); + }; + case(?zone){ + debug if(debug_channel.library) D.print("size of zone" # debug_show(zone.size())); + + var split : [Text] = []; + var split2 : [Text] = []; + var start : ?Nat = null; + var end : ?Nat = null; + var b_foundRange : Bool = false; + for(this_header in req.headers.original.vals()){ + + if(this_header.0 == 
"range" or this_header.0 == "Range"){ + b_foundRange := true; + split := Iter.toArray(Text.tokens(this_header.1, #char('='))); + split2 := Iter.toArray(Text.tokens(split[1],#char('-'))); + if(split2.size() == 1){ + start := Conversion.textToNat(split2[0]); + } else { + start := Conversion.textToNat(split2[0]); + end := Conversion.textToNat(split2[1]); + }; + debug if(debug_channel.library) D.print("split2 " # debug_show(split2)); + }; + }; + + + if(b_foundRange == true){ + //range request + debug if(debug_channel.library) D.print("dealing with a range request"); + let result = handle_stream_content( + state, + token_id, + library_id, + start, + end, + content_type, + zone, + req + ); + debug if(debug_channel.library)D.print("returning with callback:"); + debug if(debug_channel.library)D.print(debug_show(Option.isSome(result.streaming_strategy))); + return result; + + } else { + debug if(debug_channel.library)D.print("Not a range requst"); + + /* + remove this comment to get a dump of the actual headers that made it through. + return { + status_code = 200; + headers = [("Content-Type", "text/plain")]; + body = Conversion.valueToBlob(#Text(debug_show(req.headers.original) # "|||" # debug_show(req.original.headers))); + streaming_strategy = null; + }; */ + //standard content request + if(zone.size() > 1){ + //streaming required + let result = handleLargeContent( + state, + "nft/" # token_id # "|" # library_id, + content_type, + zone, + req + ); + debug if(debug_channel.library)D.print("returning with callback"); + debug if(debug_channel.library)D.print(debug_show(Option.isSome(result.streaming_strategy))); + return result; + } else { + //only one chunck + return { + status_code = 200; + headers = [("Content-Type", content_type)]; + body = Conversion.valueUnstableToBlob(zone.get(0)); + streaming_strategy = null; + }; + }; + }; + + }; + }; + } else if(location_type == "collection"){ + //on this canister but with collection id + debug if(debug_channel.library)D.print("collection"); + + let use_token_id = ""; + + + let content_type = switch(Metadata.get_nft_text_property(library_meta, "content_type")){ + case(#err(err)){return _not_found("content type not found");}; + case(#ok(val)){val}; + }; + + debug if(debug_channel.library)D.print("collection content type is " # content_type); + + let item = switch(Metadata.get_library_item_from_store(state.nft_library, use_token_id, library_id)){ + case(#err(err)){return _not_found("item not found")}; + case(#ok(val)){val}; + }; + + switch(item.getOpt(1)){ + case(null){ + //nofiledata + return _not_found("file data not found"); + }; + case(?zone){ + debug if(debug_channel.library) D.print("size of zone"); + debug if(debug_channel.library) D.print(debug_show(zone.size())); + + var split : [Text] = []; + var split2 : [Text] = []; + var start : ?Nat = null; + var end : ?Nat = null; + var b_foundRange : Bool = false; + + + + for(this_header in req.headers.original.vals()){ + + if(this_header.0 == "range" or this_header.0 == "Range"){ + b_foundRange := true; + split := Iter.toArray(Text.tokens(this_header.1, #char('='))); + split2 := Iter.toArray(Text.tokens(split[1],#char('-'))); + if(split2.size() == 1){ + start := Conversion.textToNat(split2[0]); + } else { + start := Conversion.textToNat(split2[0]); + end := Conversion.textToNat(split2[1]); + }; + }; + }; + + + if(b_foundRange == true){ + //range request + debug if(debug_channel.library) D.print("dealing with a range request"); + let result = handle_stream_content( + state, + use_token_id, + library_id, + 
start, + end, + content_type, + zone, + req + ); + debug if(debug_channel.library) D.print("returning with callback:"); + debug if(debug_channel.library) D.print(debug_show(Option.isSome(result.streaming_strategy))); + return result; + + } else { + debug if(debug_channel.library) D.print("Not a range requst"); + + /* + remove this comment to get a dump of the actual headers that made it through. + return { + status_code = 200; + headers = [("Content-Type", "text/plain")]; + body = Conversion.valueToBlob(#Text(debug_show(req.headers.original) # "|||" # debug_show(req.original.headers))); + streaming_strategy = null; + }; */ + //standard content request + if(zone.size() > 1){ + //streaming required + let result = handleLargeContent( + state, + "nft/" # use_token_id # "|" # library_id, + content_type, + zone, + req + ); + debug if(debug_channel.library) D.print("returning with callback"); + debug if(debug_channel.library) D.print(debug_show(Option.isSome(result.streaming_strategy))); + return result; + } else { + //only one chunck + return { + status_code = 200; + headers = [("Content-Type", content_type)]; + body = Conversion.valueUnstableToBlob(zone.get(0)); + streaming_strategy = null; + }; + }; + }; + + }; + }; + + + + } else { + //redirect to asset + let location = switch(Metadata.get_nft_text_property(library_meta, "location")){ + case(#err(err)){return _not_found("location not found");}; + case(#ok(val)){val}; + }; + debug if(debug_channel.library) D.print("redirecting to asset" # location); + return { + body = ""; + headers = [("Location", location)]; + status_code = 307; + streaming_strategy = null; + }; + }; + }; + + public func renderSmartRoute( + state : Types.State, + req : httpparser.ParsedHttpRequest, + metadata : CandyTypes.CandyValue, + token_id: Text, smartRoute: Text) : HTTPResponse { + //D.print("path is ex"); + let library_id = switch(Metadata.get_nft_text_property(metadata, smartRoute)){ + case(#err(err)){return _not_found("library not found");}; + case(#ok(val)){val}; + }; + //D.print(library_id); + + return renderLibrary(state, req, metadata, token_id, library_id); + }; + + //standard response for a 404 + private func _not_found(message: Text) : HTTPResponse{ + return{ + body = Text.encodeUtf8("404 Not found :" # message); + headers : [http.HeaderField] = []; + status_code : Nat16= 404; + streaming_strategy : ?StreamingStrategy = null; + }; + }; + + public func nftStreamingCallback( + tk : StreamingCallbackToken, + state: Types.State) : StreamingCallbackResponse { + debug if(debug_channel.streaming) D.print("in streaming callback"); + let path = Iter.toArray(Text.tokens(tk.key, #text("/"))); + debug if(debug_channel.streaming) D.print(debug_show(path)); + if (path.size() == 2 and path[0] == "nft") { + debug if(debug_channel.streaming) D.print("private nft"); + let path2 = Iter.toArray(Text.tokens(path[1], #text("|"))); + + let (token_id, library_id) = if(path2.size() == 1){ + ("", path2[0]); + } else { + ( path2[0], path2[1]); + }; + debug if(debug_channel.streaming) D.print(debug_show(path2)); + + let item = switch(Metadata.get_library_item_from_store(state.nft_library, token_id, library_id)){ + case(#err(err)){ + debug if(debug_channel.streaming) D.print("an error" # debug_show(err)); + return { + body = Blob.fromArray([]); + token = null; + }}; + case(#ok(val)){val}; + }; + + switch(item.getOpt(1)){ + case(null){ + //nofiledata + return { + body = Blob.fromArray([]); + token = null; + }; + }; + case(?zone){ + return stream_content( + tk.key, + tk.index, + zone, + ); 
+ }; + }; + + + } else if(path.size() == 2 and path[0] == "nft-m"){ + //have to get data differently + debug if(debug_channel.streaming) D.print("in media pathway"); + let path2 = Iter.toArray(Text.tokens(path[1], #text("|"))); + //nyi: handle private nft + let (token_id, library_id, rStartText, rEndText, sizeText) = if(path2.size() == 1){ + ("", path2[0], path2[1], path2[2], path2[3]); + } else { + ( path2[0], path2[1], path2[2], path2[3], path2[4]); + }; + debug if(debug_channel.streaming) D.print(debug_show(path2)); + let item = switch(Metadata.get_library_item_from_store(state.nft_library, token_id, library_id)){ + case(#err(err)){ + debug if(debug_channel.streaming) D.print("no item"); + return { + body = Blob.fromArray([]); + token = null; + }}; + case(#ok(val)){val}; + }; + switch(item.getOpt(1)){ + case(null){ + //nofiledata + debug if(debug_channel.streaming) D.print("no file bytes found"); + return { + body = Blob.fromArray([]); + token = null; + }; + }; + case(?zone){ + debug if(debug_channel.streaming) D.print("about to call stream media from the callback pathway"); + let rStart = Option.get(Conversion.textToNat(rStartText),0); + let rEnd = Option.get(Conversion.textToNat(rEndText),0); + let size = Option.get(Conversion.textToNat(sizeText),0); + debug if(debug_channel.streaming) D.print(debug_show(rStart, rEnd, size)); + return stream_media( + token_id, + library_id, + tk.index, + zone, + rStart, + rEnd, + size + ); + }; + }; + + }; + { + body = Blob.fromArray([]); + token = null; + }; + }; + + private func stream_content( + key : Text, + index : Nat, + data : CandyTypes.DataZone, + ) : StreamingCallbackResponse { + let result = _stream_content( + key, + index, + data, + ); + + D.print("the stream content " # key); + D.print(debug_show(result)); + { + body = result.payload; + token = result.callback; + }; + }; + + public func http_request_streaming_callback( + tk : StreamingCallbackToken, + state : Types.State) : StreamingCallbackResponse { + + debug if(debug_channel.large_content) D.print("in the request_streamint callbak"); + debug if(debug_channel.large_content) D.print(debug_show(tk)); + if (Text.startsWith(tk.key, #text("nft/"))) { + let path = Iter.toArray(Text.tokens(tk.key, #text("/"))); + + let path2 = Iter.toArray(Text.tokens(path[1], #text("|"))); + + + //nyi: handle private nft + debug if(debug_channel.large_content) D.print(debug_show(path)); + debug if(debug_channel.large_content) D.print(debug_show(path2)); + + let (token_id, library_id) = if(path2.size() == 1){ + ("", path2[0]); + } else { + ( path2[0], path2[1]); + }; + + let item = switch(Metadata.get_library_item_from_store(state.nft_library, token_id, library_id)){ + case(#err(err)){return { + body = Blob.fromArray([]); + token = null; + }; + }; + case(#ok(val)){val}; + }; + + //D.print("have item"); + + switch (item.getOpt(1)) { + case (null) { }; + case (?zone) { + return stream_content( + tk.key, + tk.index, + zone, + ); + }; + }; + } else if (Text.startsWith(tk.key, #text("nft-m/"))){ + let path = Iter.toArray(Text.tokens(tk.key, #text("/"))); + + let path2 = Iter.toArray(Text.tokens(path[1], #text("|"))); + //nyi: handle private nft + debug if(debug_channel.large_content) D.print(debug_show(path)); + debug if(debug_channel.large_content) D.print(debug_show(path2)); + + let (token_id, library_id, rStartText, rEndText, sizeText) = if(path2.size() == 1){ + ("", path2[0], path2[1], path2[2], path2[3]); + } else { + ( path2[0], path2[1], path2[2], path2[3], path2[4]); + }; + + let item = 
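+  /* Illustrative sketch: the consumer of a StreamingStrategy (normally the HTTP gateway
+     calling the streaming callback the main actor exposes, which is not part of this file)
+     simply re-invokes the callback with each returned token until the token comes back null.
+     A local drain loop written as if inside this module, driving nftStreamingCallback
+     directly; the function name is illustrative.
+
+     func total_streamed_bytes(state : Types.State, first : HTTPResponse) : Nat {
+       var total = first.body.size();
+       var next : ?StreamingCallbackToken = switch (first.streaming_strategy) {
+         case (?#Callback(cb)) { ?cb.token };
+         case (null) { null };
+       };
+       label pump loop {
+         switch (next) {
+           case (null) { break pump };
+           case (?tk) {
+             let chunk = nftStreamingCallback(tk, state);
+             total += chunk.body.size();
+             next := chunk.token;
+           };
+         };
+       };
+       total;
+     };
+  */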
switch(Metadata.get_library_item_from_store(state.nft_library, token_id, library_id)){ + case(#err(err)){return { + body = Blob.fromArray([]); + token = null; + }; + }; + case(#ok(val)){val}; + }; + + debug if(debug_channel.large_content) //D.print("have item"); + + switch (item.getOpt(1)) { + case (null) { }; + case (?zone) { + return stream_media( + token_id, + library_id, + + tk.index, + zone, + Option.get(Conversion.textToNat(rStartText),0),//rstart + + Option.get(Conversion.textToNat(rEndText),0),//rend + Option.get(Conversion.textToNat(sizeText),0),//size + ); + }; + }; + + } else { + //handle static assests if we have them + }; + return { + body = Blob.fromArray([]); + token = null; + }; + }; + + //pulls + private func json(message: CandyTypes.CandyValue, _query: ?Text) : HTTPResponse { + let message_response = switch(_query) { + case(null) { + message + }; + case(?q) { + switch(splitQuery(Text.replace(q, #text("--"), "~"), '~')) { + case(#ok(qs)) { + switch(get_deep_properties(message, qs)) { + case(#ok(data)) { + data; + }; + case(#back){ + message; + }; + case(#err(err)) { + return _not_found("properties not found: " # q); + }; + + }; + }; + case(#err(err)) { + return _not_found(err); + }; + /* case(_){ + return _not_found("unexpected value: " # debug_show(message)); + }; */ + }; + }; + }; + + return { + body = Text.encodeUtf8(value_to_json(message_response)); + headers = [(("Content-Type", "application/json")),(("Access-Control-Allow-Origin", "*"))]; + status_code = 200; + streaming_strategy = null; + }; + }; + + type sQuery = { #standard: Text; #multi: Text }; + //handles queries + public func splitQuery(q: Text, p: Char): Result.Result, Text> { + var queries = List.nil(); + var key : Text = ""; + var multi : Bool = false; + var open : Bool = false; + + let addQueries = func(key: Text, current: List.List, multi: Bool): Result.Result, Text> { + //D.print(debug_show(multi, key)); + if(multi) { + if(Text.contains(key, #char(p))) { + return #err("multi: not supported split") + }; + #ok(List.push(#multi(key), current)); + } else { + if(Text.contains(key, #char(','))) { + return #err("Standard: not supported [,]"); + }; + #ok(List.push(#standard(key), current)); + }; + }; + + for(thisChar in Text.toIter(q)) { + if(thisChar == '[') { + open := true; + multi := true; + } else if(thisChar == ']') { + open := false; + } else { + if(thisChar == p and open == false) { + switch(addQueries(key, queries, multi)) { + case(#ok(res)){queries:=res;}; + case(err){return err;}; + }; + multi := false; + key := ""; + } else { + key:= key # Char.toText(thisChar); + }; + }; + }; + + switch(addQueries(key, queries, multi)) { + case(#ok(res)){queries:=res;}; + case(err){return err;}; + }; + return #ok(List.reverse(queries)); + }; + + //gets prroperties from deep in a structure + public func get_deep_properties(metadata: CandyTypes.CandyValue, qs: List.List): {#ok: CandyTypes.CandyValue; #err; #back} { + if(List.isNil(qs)) { + return #back(); + }; + + let item = List.pop(qs); + + let key = switch(item.0){ + case(null){return #err;}; + case(?val){val;}; + }; + let listQs = item.1; + + switch(metadata) { + case(#Class(properties)) { + switch(key) { + case(#standard(standard)) { + switch(Properties.getClassProperty(metadata, standard)){ + case(null) { + return #err(); + }; + case(?val){ + switch(get_deep_properties(val.value, listQs)) { + case(#ok(res)){#ok(res);}; + case(#back()){#ok(#Class([val]));}; + case(err){err;}; + }; + }; + }; + }; + case(#multi(multi)) { + if(List.isNil(listQs)) { + let props = 
Array.map( + split_text(multi, ','), + func (key: Text): CandyTypes.Query { + return { + name = key; + next = []; + }; + } + ); + + return switch(Properties.getProperties(properties, props)) { + case(#ok(val)){#ok(#Class(val));}; + case(#err(err)){#err()}; + }; + } else { + return #err(); + }; + }; + }; + }; + case(#Array(_)) { + switch(key) { + case(#standard(standard)) { + var len = 0; + for(this_item in Conversion.valueToValueArray(metadata).vals()) { + if(Nat.toText(len) == standard) { + switch(get_deep_properties(this_item, listQs)) { + case(#ok(res)){return #ok(res);}; + case(#back()){return #ok(this_item);}; + case(err){return err;}; + }; + }; + len := len + 1; + }; + }; + case(#multi(multi)) { + var splitMulti: [Text] = split_text(multi, ','); + let list: Buffer.Buffer = Buffer.Buffer(1); + var len = 0; + for(this_item in Conversion.valueToValueArray(metadata).vals()) { + switch(Array.find(splitMulti, func (key: Text) { + return key == Nat.toText(len); + })) { + case(null) {}; + case(?find) { + switch(get_deep_properties(this_item, listQs)) { + case(#ok(res)){ + list.add(res); + }; + case(#back()){ + list.add(this_item); + }; + case(err){return err;}; + }; + }; + }; + len := len + 1; + }; + + if(list.size() == splitMulti.size()) { + return #ok(#Array(#thawed(list.toArray()))); + } else { + return #err(); + }; + }; + }; + + return #err(); + }; + case(_) { + if(List.isNil(qs)) { + return #back(); + }; + + return #err(); + }; + }; + }; + + //converst a candu value to JSON + public func value_to_json(val: CandyTypes.CandyValue): Text { + switch(val){ + //nat + case(#Nat(val)){ Nat.toText(val)}; + //text + case(#Text(val)){ "\"" # val # "\""; }; + //class + case(#Class(val)){ + var body: Buffer.Buffer = Buffer.Buffer(1); + for(this_item in val.vals()){ + body.add("\"" # this_item.name # "\"" # ":" # value_to_json(this_item.value)); + }; + + return "{" # Text.join(",", body.vals()) # "}"; + }; + //array + case(#Array(val)){ + switch(val){ + case(#frozen(val)){ + var body: Buffer.Buffer = Buffer.Buffer(1); + for(this_item in val.vals()){ + body.add(value_to_json(this_item)); + }; + + return "[" # Text.join(",", body.vals()) # "]"; + }; + case(#thawed(val)){ + var body: Buffer.Buffer = Buffer.Buffer(1); + for(this_item in val.vals()){ + body.add(value_to_json(this_item)); + }; + + return "[" # Text.join(",", body.vals()) # "]"; + }; + }; + }; + //bytes + case(#Bytes(val)){ + switch(val){ + case(#frozen(val)){ + return "\"" # "CandyHex.encode" # "\"";//CandyHex.encode(val); + }; + case(#thawed(val)){ + return "\"" # "CandyHex.encode" # "\"";//CandyHex.encode(val); + }; + }; + }; + //bytes + case(#Blob(val)){ + + return "\"" # "CandyHex.encode" # "\"";//CandyHex.encode(val); + + }; + //principal + case(#Principal(val)){ "\"" # Principal.toText(val) # "\"";}; + case(_){"";}; + }; + }; + + public func split_text(q: Text, p: Char): [Text] { + var queries: Buffer.Buffer = Buffer.Buffer(1); + var key : Text = ""; + + for(thisChar in Text.toIter(q)) { + if(thisChar != '[' and thisChar != ']') { + if(thisChar == p) { + queries.add(key); + key := ""; + } else { + key:= key # Char.toText(thisChar); + }; + }; + }; + queries.add(key); + return queries.toArray(); + }; + + //checks that a access token holder is the collection owner + //**NOTE: NOTE: Data stored on the IC should not be considered secure. It is possible(though not probable) that node operators could look at the data at rest and see access tokens. 
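+  /* Illustrative examples: the `query` parameter accepted by the /info routes is interpreted
+     by splitQuery and get_deep_properties. Segments separated by '~' (written "--" in URLs)
+     walk into nested values, and a bracketed, comma-separated segment selects several
+     properties or array positions at one level; value_to_json then renders the selection.
+     Against collection metadata shaped roughly like
+       { "id": "bm-0", "owner": ..., "library": [ {...}, {...}, {...} ] }   (illustrative)
+
+       ?query=owner            -> {"owner": ...}              one top-level property
+       ?query=[id,owner]       -> {"id":"bm-0","owner":...}   several properties at one level
+       ?query=library--[0,2]   -> [ {...}, {...} ]            elements 0 and 2 of the "library" array
+
+     The intermediate form handed to get_deep_properties:
+
+     let parsed = splitQuery(Text.replace("library--[0,2]", #text("--"), "~"), '~');
+     // parsed == #ok(a list of [#standard("library"), #multi("0,2")])
+  */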
The only current method for hiding data from node providers is to encrypt the data before putting it into a canister. It is highly recommended that any personally identifiable information is encrypted before being stored on a canister with a separate and secure decryption system in place.** + public func http_owner_check(stateBody : Types.State, req : httpparser.ParsedHttpRequest): Result.Result<(), Text> { + switch(req.url.queryObj.get("access")) { + case(null) { + return #err("no access code in request when nft not minted"); + }; + case(?access_token) { + switch(stateBody.access_tokens.get(access_token)) { + case(null) { + return #err("identity not found by access_token : " # access_token); + }; + case(?info) { + let { identity; expires; } = info; + + if(stateBody.state.collection_data.owner != identity) { + return #err("not an owner"); + }; + + if(expires < Time.now()) { + return #err("access expired"); + }; + }; + }; + }; + }; + + #ok(); + }; + + //checks that a access token holder is an owner of an NFT + //**NOTE: NOTE: Data stored on the IC should not be considered secure. It is possible(though not probable) that node operators could look at the data at rest and see access tokens. The only current method for hiding data from node providers is to encrypt the data before putting it into a canister. It is highly recommended that any personally identifiable information is encrypted before being stored on a canister with a separate and secure decryption system in place.** + public func http_nft_owner_check(stateBody : Types.State, req : httpparser.ParsedHttpRequest, metadata: CandyTypes.CandyValue): Result.Result<(), Text> { + switch(req.url.queryObj.get("access")) { + case(null) { + return #err("no access code in request when nft not minted"); + }; + case(?access_token) { + switch(stateBody.access_tokens.get(access_token)) { + case(null) { + return #err("identity not found by access_token : " # access_token); + }; + case(?info) { + let { identity; expires; } = info; + + switch(Metadata.is_nft_owner(metadata, #principal(identity))){ + case(#ok(val)){ + if(val == false){ + return #err("not an owner"); + }; + }; + case(#err(err)){ + return #err("identity not found by access_token : " # access_token); + }; + }; + + if(expires < Time.now()) { + return #err("access expired"); + }; + }; + }; + }; + }; + + #ok(); + }; + + //handles http requests + public func http_request( + state : Types.State, + rawReq: HttpRequest, + caller : Principal): (HTTPResponse) { + + debug if(debug_channel.request) D.print("a page was requested"); + + let req = httpparser.parse(rawReq); + let {host; port; protocol; path; queryObj; anchor; original = url} = req.url; + + + let path_size = req.url.path.array.size(); + let path_array = req.url.path.array; + + + debug if(debug_channel.request) D.print(debug_show(rawReq)); + + if(path_size == 0) { + return { + body = Text.encodeUtf8 (" An Origyn NFT Canister \n"); + headers = []; + status_code = 200; + streaming_strategy = null; + }; + }; + + + if(path_size > 0){ + if(path_array[0] == "-"){ + if(path_size > 1){ + debug if(debug_channel.request) D.print("on path print area"); + debug if(debug_channel.request) D.print(debug_show(path_size)); + let token_id = path_array[1]; + + let metadata = switch(Map.get(state.state.nft_metadata, Map.thash, token_id)){ + case(null){ + return _not_found("metadata not found"); + }; + case(?val){ + val; + }; + }; + let is_minted = Metadata.is_minted(metadata); + if(path_size == 2){ + //show the main asset + debug if(debug_channel.request) 
D.print("should be showing the main asset unless unmited" # debug_show(is_minted)); + if(is_minted == false){ + return renderSmartRoute(state, req, metadata, token_id, Types.metadata.hidden_asset); + }; + return renderSmartRoute(state, req, metadata, token_id, Types.metadata.primary_asset); + }; + if(path_size == 3){ + if(path_array[2] == "ex"){ + let aResponse = renderSmartRoute(state ,req, metadata, token_id, Types.metadata.experience_asset); + if(is_minted == false and aResponse.status_code==404){ + return renderSmartRoute(state ,req, metadata, token_id, Types.metadata.hidden_asset); + }; + return aResponse; + }; + if(path_array[2] == "preview"){ + if(is_minted == false){ + return renderSmartRoute(state,req, metadata, token_id, Types.metadata.hidden_asset); + }; + return renderSmartRoute(state,req, metadata, token_id, Types.metadata.preview_asset); + }; + if(path_array[2] == "hidden"){ + return renderSmartRoute(state,req, metadata, token_id, Types.metadata.hidden_asset); + }; + if(path_array[2] == "primary"){ + if(is_minted == false){ + return renderSmartRoute(state,req, metadata, token_id, Types.metadata.hidden_asset); + }; + return renderSmartRoute(state,req, metadata, token_id, Types.metadata.primary_asset); + }; + if(path_array[2] == "info"){ + return json(Metadata.get_clean_metadata(metadata, caller), queryObj.get("query")); + }; + if(path_array[2] == "library"){ + let libraries = switch(Metadata.get_nft_library(Metadata.get_clean_metadata(metadata, caller), ?caller)){ + case(#err(err)){return _not_found("libraries not found");}; + case(#ok(val)){ val }; + }; + return json(libraries, null); + }; + }; + if(path_size > 3){ + if(path_array[2] == "-") { + let library_id = path_array[3]; + if(path_size == 4){ + if (is_minted == false) { + switch(http_owner_check(state, req)) { + case(#err(err)) { + return _not_found(err); + }; + case(#ok()) {}; + }; + }; + + return renderLibrary(state, req, metadata, token_id, library_id); + }; + if(path_size == 5){ + if(path_array[4] == "info"){ + let library_meta = switch(Metadata.get_library_meta(metadata, library_id)){ + case(#err(err)){return _not_found("library by " # library_id # " not found");}; + case(#ok(val)){val}; + }; + return json(library_meta, queryObj.get("query")); + }; + }; + }; + }; + }; + } else if(path_array[0] == "collection"){ + debug if(debug_channel.request) D.print("found collection"); + + + debug if(debug_channel.request) D.print("on path print area"); + debug if(debug_channel.request) D.print(debug_show(path_size)); + let token_id = ""; + + let metadata = switch(Map.get(state.state.nft_metadata, Map.thash,token_id)){ + case(null){ + return _not_found("metadata not found"); + }; + case(?val){ + val; + }; + }; + if(path_size > 1){ + if(path_array[1] == "-"){ + + debug if(debug_channel.request) D.print("found -"); + + if(path_size == 2){ + // https://exos.surf/-/canister_id/collection/ + debug if(debug_channel.request) D.print("render smart route 2 collection" # token_id); + + return renderSmartRoute(state, req, metadata, token_id, Types.metadata.primary_asset); + }; + if(path_size > 2){ + + let library_id = path_array[2]; + if(path_size == 3){ + debug if(debug_channel.request) D.print("render library " # token_id # " " # library_id); + // https://exos.surf/-/canister_id/collection/-/library_id + return renderLibrary(state, req, metadata, token_id, library_id); + }; + if(path_size == 4){ + if(path_array[4] == "info"){ + /// https://exos.surf/-/canister_id/collection/-/library_id/info + debug if(debug_channel.request) 
D.print("render info " # token_id # " " # library_id); + + let library_meta = switch(Metadata.get_library_meta(metadata, library_id)){ + case(#err(err)){return _not_found("library by " # library_id # " not found");}; + case(#ok(val)){val}; + }; + return json(library_meta, queryObj.get("query")); + }; + }; + + }; + }; + if(path_array[1] == "ex"){ + debug if(debug_channel.request) D.print("render ex " # token_id ); + let aResponse = renderSmartRoute(state ,req, metadata, token_id, Types.metadata.experience_asset); + if(aResponse.status_code==404){ + return renderSmartRoute(state ,req, metadata, token_id, Types.metadata.hidden_asset); + }; + return aResponse; + }; + if(path_array[1] == "preview"){ + debug if(debug_channel.request) D.print("render perview " # token_id ); + return renderSmartRoute(state,req, metadata, token_id, Types.metadata.preview_asset); + }; + if(path_array[1] == "hidden"){ + debug if(debug_channel.request) D.print("render hidden " # token_id ); + return renderSmartRoute(state,req, metadata, token_id, Types.metadata.hidden_asset); + }; + if(path_array[1] == "primary"){ + debug if(debug_channel.request) D.print("render primary " # token_id ); + return renderSmartRoute(state,req, metadata, token_id, Types.metadata.primary_asset); + }; + if(path_array[1] == "info"){ + debug if(debug_channel.request) D.print("render info " # token_id ); + return json(Metadata.get_clean_metadata(metadata, caller), queryObj.get("query")); + }; + if(path_array[1] == "library"){ + debug if(debug_channel.request) D.print("render library " # token_id ); + let libraries = switch(Metadata.get_nft_library(Metadata.get_clean_metadata(metadata, caller), ?caller)){ + case(#err(err)){return _not_found("libraries not found");}; + case(#ok(val)){ val }; + }; + return json(libraries, null); + }; + }; + } else if(path_array[0] == "metrics"){ + return { + body = Text.encodeUtf8("Metrics page :"); + headers = []; + status_code = 200; + streaming_strategy = null; + }; + }; + }; + + return _not_found("nyi"); + }; + + +} diff --git a/src/origyn_nft_reference/ledger_interface.mo b/src/origyn_nft_reference/ledger_interface.mo new file mode 100644 index 0000000..ef169df --- /dev/null +++ b/src/origyn_nft_reference/ledger_interface.mo @@ -0,0 +1,313 @@ +import Principal "mo:base/Principal"; +import Time "mo:base/Time"; +import Blob "mo:base/Blob"; +import Nat64 "mo:base/Nat64"; +import Nat32 "mo:base/Nat32"; +import Text "mo:base/Text"; +import D "mo:base/Debug"; +import Result "mo:base/Result"; +import Int "mo:base/Int"; +import Nat "mo:base/Nat"; +import Error "mo:base/Error"; +import AccountIdentifier "mo:principalmo/AccountIdentifier"; +import Types "types"; +import NFTUtils "utils"; +import DFXTypes "dfxtypes"; +import Hex "mo:encoding/Hex"; + +class Ledger_Interface() { + + //this file provides services around moving tokens around a standard ledger(ICP/OGY) + + let debug_channel = { + deposit = false; + sale = false; + transfer = false; + }; + + /* + + validate deposit was used before we implemented sub accounts. 
We are leaving it here as it is + an example of how one could implement this using dip20 without implementing transferFrom + + public func validateDeposit(host: Principal, deposit : Types.DepositDetail, caller: Principal) : async Result.Result { + //D.print("in validate ledger deposit"); + //D.print(Principal.toText(host)); + //D.print(debug_show(deposit)); + let ledger = switch(deposit.token){ + case(#ic(detail)){ + detail; + }; + case(_){ + return #err(Types.errors(#improper_interface, "ledger_interface - validate deposit - not ic" # debug_show(deposit), ?caller)); + } + }; + //D.print(debug_show(canister)); + //D.print(debug_show(block)); + let ledger_actor : DFXTypes.Service = actor(Principal.toText(ledger.canister)); + + try{ + + + + + //D.print("comparing hosts"); + //D.print(debug_show(Blob.fromArray(transfer.to))); + //D.print(debug_show(Blob.fromArray(AccountIdentifier.fromPrincipal(host, null)))); + + if( transfer.to != Blob.fromArray(AccountIdentifier.addHash(AccountIdentifier.fromPrincipal(host, null)))){ + //D.print("Host didnt match"); + return #err(Types.errors(#validate_trx_wrong_host, "ledger_interface - validate deposit - bad host" # debug_show(deposit) # " should be " # Principal.toText(host), ?caller)); + }; + + //D.print("comparing buyer"); + //D.print(debug_show(transfer.from)); + //D.print(debug_show(Blob.fromArray(transfer.from))); + //D.print(debug_show(AccountIdentifier.toText(transfer.from))); + + //D.print(debug_show(Text.decodeUtf8(Blob.fromArray(transfer.from)))); + //D.print(debug_show(#account_id(Opt.get(Text.decodeUtf8(Blob.fromArray(transfer.from)),"")))); + //D.print(debug_show(deposit.buyer)); + if(Types.account_eq(#account_id(Hex.encode(Blob.toArray(transfer.from))), deposit.buyer) == false){ + //D.print("from and buyer didnt match " # debug_show(transfer.from) # " " # debug_show(deposit.buyer)); + return #err(Types.errors(#validate_deposit_wrong_buyer, "ledger_interface - validate deposit - bad buyer" # debug_show(deposit), ?caller)); + }; + + if(Nat64.toNat(transfer.amount.e8s) != deposit.amount){ + //D.print("amount didnt match"); + return #err(Types.errors(#validate_deposit_wrong_amount, "ledger_interface - validate deposit - bad amount" # debug_show(deposit), ?caller)); + }; + } catch (e){ + return #err(Types.errors(#validate_deposit_failed, "ledger_interface - validate deposit - ledger throw " # Error.message(e) # debug_show(deposit), ?caller)); + }; + //D.print("returning true"); + return #ok(true); + }; */ + + //moves a deposit from a deposit subaccount to an escrow subaccount + public func transfer_deposit(host: Principal, escrow : Types.EscrowRequest, caller: Principal) : async Result.Result<{transaction_id: Types.TransactionID; subaccount_info: Types.SubAccountInfo}, Types.OrigynError> { + debug if(debug_channel.deposit) D.print("in transfer_deposit ledger deposit"); + debug if(debug_channel.deposit) D.print(Principal.toText(host)); + debug if(debug_channel.deposit) D.print(debug_show(escrow)); + + //nyi: extra safety make sure the caller is the buyer(or the network?) 
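+  /* Illustrative summary of the escrow flow these helpers implement; step 2's caller (the
+     market/escrow code) is not part of this file.
+
+       1. The buyer pays amount + ledger fee into the gateway's deposit sub-account for
+          their principal (NFTUtils.get_deposit_info(buyer, gateway).account).
+       2. transfer_deposit below moves the funds on to the escrow sub-account derived from
+          (buyer, seller, token, token_id, amount); the fee is paid on top here, so the full
+          escrow amount arrives intact.
+       3. At settlement, transfer_sale moves the escrow into the sale sub-account and
+          send_payment_minus_fee pays parties out; those hops deduct the ledger fee from
+          the amount being moved.
+
+     Each hop is tagged with a deterministic memo so it can be recognised on the ledger, e.g.:
+
+     let memo = Nat64.fromNat(Nat32.toNat(Text.hash("com.origyn.nft.escrow_from_deposit" # debug_show(escrow))));
+  */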
+ let escrow_account_info : Types.SubAccountInfo = NFTUtils.get_escrow_account_info({ + amount = escrow.deposit.amount; + buyer = escrow.deposit.buyer; + seller = escrow.deposit.seller; + token = escrow.deposit.token; + token_id = escrow.token_id; + }, host); + + let deposit_account = NFTUtils.get_deposit_info(escrow.deposit.buyer, host); + + let ledger = switch(escrow.deposit.token){ + case(#ic(detail)){ + detail; + }; + case(_){ + return #err(Types.errors(#improper_interface, "ledger_interface - validate deposit - not ic" # debug_show(escrow), ?caller)); + } + }; + + try{ + //D.print("sending transfer blocks # " # debug_show(escrow.deposit.amount - ledger.fee)); + + let result = await transfer({ + ledger = ledger.canister; + to = host; + //do not subract the fee...you need the full amount in the account. User needs to send in the fee as extra. + //in the future we may want to actualluy add the fee if the buyer is going to pay all fees. + amount = Nat64.fromNat(escrow.deposit.amount); + fee = Nat64.fromNat(ledger.fee); + memo = Nat64.fromNat(Nat32.toNat(Text.hash("com.origyn.nft.escrow_from_deposit" # debug_show(escrow)))); + caller = caller; + to_subaccount = ?escrow_account_info.account.sub_account; + from_subaccount = ?deposit_account.account.sub_account; + }); + + let result_block = switch(result){ + case(#ok(val)){ + val; + }; + case(#err(err)){ + return #err(Types.errors(#validate_deposit_failed, "ledger_interface - transfer deposit failed " # debug_show(escrow.deposit) # " " # debug_show(err), ?caller)); + }; + }; + + return #ok({transaction_id= result_block; subaccount_info = escrow_account_info}); + + } catch (e){ + return #err(Types.errors(#validate_deposit_failed, "ledger_interface - validate deposit - ledger throw " # Error.message(e) # debug_show(escrow.deposit), ?caller)); + }; + }; + + //allows a user to withdraw money from a sale + public func transfer_sale(host: Principal, escrow : Types.EscrowReceipt, token_id : Text, caller: Principal) : async Result.Result<(Types.TransactionID, Types.SubAccountInfo, Nat), Types.OrigynError> { + debug if(debug_channel.sale) D.print("in transfer_sale ledger sale"); + debug if(debug_channel.sale) D.print(Principal.toText(host)); + debug if(debug_channel.sale) D.print(debug_show(escrow)); + + //nyi: an extra layer of security? 
+ + D.print("in transfer sale" # token_id # debug_show(Time.now())); + + let basic_info = { + amount = escrow.amount; + buyer = escrow.buyer; + seller = escrow.seller; + token = escrow.token; + token_id = escrow.token_id; + }; + + let escrow_account_info : Types.SubAccountInfo = NFTUtils.get_escrow_account_info(basic_info, host); + + let sale_account_info = NFTUtils.get_sale_account_info(basic_info, host); + + debug if(debug_channel.sale) D.print("sale info used " # debug_show(sale_account_info)); + + let ledger = switch(escrow.token){ + case(#ic(detail)){ + detail; + }; + case(_){ + return #err(Types.errors(#improper_interface, "ledger_interface - validate deposit - not ic" # debug_show(escrow), ?caller)); + } + }; + + if(escrow.amount <= ledger.fee){ + return #err(Types.errors(#improper_interface, "ledger_interface - amount is equal or less than fee - not ic" # debug_show(escrow), ?caller)); + + }; + + try{ + debug if(debug_channel.sale) D.print("sending transfer blocks # " # debug_show((Nat.sub(escrow.amount,ledger.fee), sale_account_info.account.sub_account) )); + + D.print("memo will be com.origyn.nft.sale_from_escrow" # debug_show(escrow) # token_id); + let result = await transfer({ + ledger = ledger.canister; + to = host; + amount = Nat64.fromNat(escrow.amount - ledger.fee); + fee = Nat64.fromNat(ledger.fee); + memo = Nat64.fromNat(Nat32.toNat(Text.hash("com.origyn.nft.sale_from_escrow" # debug_show(escrow) # token_id))); + caller = caller; + to_subaccount = ?sale_account_info.account.sub_account; + from_subaccount = ?escrow_account_info.account.sub_account; + //created_at_time = ?{timestamp_nanos = Nat64.fromNat(Int.abs(Time.now()))} + }); + + let result_block = switch(result){ + case(#ok(val)){ + debug if(debug_channel.sale) D.print("sending to sale account was succesful" # debug_show(val)); + val; + }; + case(#err(err)){ + return #err(Types.errors(#validate_deposit_failed, "ledger_interface - transfer deposit failed " # debug_show(escrow) # " " # debug_show(err), ?caller)); + }; + }; + + return #ok(result_block, sale_account_info, ledger.fee); + + } catch (e){ + return #err(Types.errors(#validate_deposit_failed, "ledger_interface - validate deposit - ledger throw " # Error.message(e) # debug_show(escrow), ?caller)); + }; + }; + + + //a raw transfer + private func transfer(request : { + ledger: Principal; + to: Principal; + to_subaccount: ?Blob; + from_subaccount: ?Blob; + amount: Nat64; + fee: Nat64; + memo: Nat64; + caller: Principal + }) : async Result.Result { + D.print("in transfeledger"); + D.print(Principal.toText(request.ledger)); + + + let ledger_actor : DFXTypes.Service = actor(Principal.toText(request.ledger)); + + + let to_account = AccountIdentifier.addHash(AccountIdentifier.fromPrincipal(request.to, switch(request.to_subaccount){case(null){null}; case(?val){?Blob.toArray(val)}})); + + + debug if(debug_channel.transfer) D.print("transfering"); + debug if(debug_channel.transfer) D.print("from account" # debug_show(request.from_subaccount)); + debug if(debug_channel.transfer) D.print("to account" # debug_show(Blob.fromArray(to_account))); + try{ + debug if(debug_channel.transfer) D.print("sending transfer blocks # " # debug_show(request)); + let result = await ledger_actor.transfer({ + to = Blob.fromArray(to_account); + fee = {e8s = request.fee}; + memo = request.memo; + from_subaccount = switch(request.from_subaccount){ case(null){null;};case(?val){?Blob.toArray(val)}}; + created_at_time = ?{timestamp_nanos = Nat64.fromNat(Int.abs(Time.now()))}; + amount = {e8s = 
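+  /* Illustrative arithmetic: transfer_sale above rejects amounts at or below the ledger fee
+     and forwards amount - fee, so with the standard ICP fee (an assumption for the example)
+     an escrow of 1 ICP settles as 0.9999 ICP.
+
+     let fee = 10_000;                    // e8s
+     let escrow_amount = 100_000_000;     // 1 ICP in e8s
+     assert(escrow_amount > fee);         // the guard above returns #improper_interface otherwise
+     let forwarded = escrow_amount - fee; // 99_990_000 e8s reach the sale sub-account
+  */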
request.amount}}); + + debug if(debug_channel.transfer) D.print("result is " # debug_show(result)); + let result_block = switch(result){ + case(#Ok(val)){ + val; + }; + case(#Err(err)){ + return #err(Types.errors(#improper_interface, "ledger_interface - transfer failed " # debug_show(request) # " " # debug_show(err), ?request.caller)); + }; + }; + + return #ok(#nat(Nat64.toNat(result_block))); + + } catch (e){ + return #err(Types.errors(#improper_interface, "ledger_interface - ledger throw " # Error.message(e) # debug_show(request), ?request.caller)); + }; + + }; + + //sends a payment and withdraws a fee + public func send_payment_minus_fee(account: Types.Account, token: Types.ICTokenSpec, amount : Nat, sub_account: ?Blob, caller: Principal) : async Result.Result<{trx_id: Types.TransactionID; fee: Nat}, Types.OrigynError> { + debug if(debug_channel.transfer) D.print("in send payment deposit"); + + let ledger : DFXTypes.Service = actor(Principal.toText(token.canister)); + try{ + debug if(debug_channel.transfer) D.print("sending payment" # debug_show((account, sub_account))); + let account_id = switch(account){ + case(#account_id(val)){switch(AccountIdentifier.fromText(val)){ + case(#ok(val)){AccountIdentifier.addHash(val)}; + case(#err(e)){return #err(Types.errors(#nyi, "ledger_interface - send payment - bad principal" # debug_show(account), ?caller));} + }}; + case(#principal(val)){AccountIdentifier.addHash(AccountIdentifier.fromPrincipal(val ,null))}; + case(#account(val)){AccountIdentifier.addHash(AccountIdentifier.fromPrincipal(val.owner ,switch(val.sub_account){case(null){null;}; case(?val){ ?Blob.toArray(val);}}))}; + case(_){return #err(Types.errors(#nyi, "ledger_interface - send payment - bad acount" # debug_show(account), ?caller));} + }; + + debug if(debug_channel.transfer) D.print("account_id" # debug_show( Blob.fromArray(account_id))); + + let result = await ledger.transfer({ + to = Blob.fromArray(account_id); + from_subaccount = switch(sub_account){ + case(null){null;}; + case(?val){?Blob.toArray(val)}; + }; + fee = {e8s = Nat64.fromNat(token.fee)}; + memo = Nat64.fromNat(Nat32.toNat(Text.hash("com.origyn.nft.out_going_payment"))); + created_at_time = ?{timestamp_nanos = Nat64.fromNat(Int.abs(Time.now()))}; + amount = {e8s = Nat64.fromNat(amount - token.fee)}; //many other places assume the token fee is removed here so don't change this + }); + + debug if(debug_channel.transfer) D.print(debug_show(result)); + + switch(result){ + case(#Ok(val)){ + #ok({trx_id = #nat(Nat64.toNat(val)); fee = token.fee}); + }; + case(#Err(err)){ + #err(Types.errors(#nyi, "ledger_interface - send payment - payment failed " # debug_show(err), ?caller));} + }; + } catch (e){ + return #err(Types.errors(#nyi, "ledger_interface - send payment - payment failed " # Error.message(e), ?caller));} + }; + +}; diff --git a/src/origyn_nft_reference/main.mo b/src/origyn_nft_reference/main.mo new file mode 100644 index 0000000..e799e12 --- /dev/null +++ b/src/origyn_nft_reference/main.mo @@ -0,0 +1,1520 @@ +import Array "mo:base/Array"; +import Blob "mo:base/Blob"; +import Buffer "mo:base/Buffer"; +import CandyTypes "mo:candy_0_1_10/types"; +import Conversions "mo:candy_0_1_10/conversion"; +import Current "migrations/v000_001_000/types"; +import Cycles "mo:base/ExperimentalCycles"; +import D "mo:base/Debug"; +import DIP721 "DIP721"; +import EXT "mo:ext/Core"; +import EXTCommon "mo:ext/Common"; +import Error "mo:base/Error"; +import Iter "mo:base/Iter"; +import Map "mo:map_6_0_0/Map"; +import Market "market"; 
+import Metadata "metadata"; +import MigrationTypes "./migrations/types"; +import Migrations "./migrations"; +import Mint "mint"; +import NFTUtils "utils"; +import Nat "mo:base/Nat"; +import Nat32 "mo:base/Nat32"; +import Nat8 "mo:base/Nat8"; +import Option "mo:base/Option"; +import Owner "owner"; +import Principal "mo:base/Principal"; +import Properties "mo:candy_0_1_10/properties"; +import Result "mo:base/Result"; +import Text "mo:base/Text"; +import Time "mo:base/Time"; +import TrieMap "mo:base/TrieMap"; +import Types "./types"; +import Governance "governance"; +import Workspace "mo:candy_0_1_10/workspace"; +import data "data"; +import http "http"; + +shared (deployer) actor class Nft_Canister(__initargs : Types.InitArgs) = this { + + //lets user turn debug messages on and off for local replica + let debug_channel = { + instantiation = true; + upgrade = false; + function_announce = false; + storage = false; + streaming = false; + }; + + debug if(debug_channel.instantiation) D.print("creating a canister"); + + //a standard file chunck size. The IC limits intercanister messages to ~2MB+ so we set that here + stable var SIZE_CHUNK = 2048000; //max message size + + //canisters can support multiple storage nodes + //if you have a small collection you don't need to use a storage collection + //and can have this gateway canister act as your storage. + let initial_storage = switch(__initargs.storage_space){ + case(null){ + SIZE_CHUNK * 500; //default is 1GB + }; + case(?val){ + if(val > SIZE_CHUNK * 1000){ //only 2GB useable in a canister - hopefully this changes in the future + assert(false); + }; + val; + } + }; + + + ///for migration information and pattern see + //https://github.com/ZhenyaUsenko/motoko-migrations + let StateTypes = MigrationTypes.Current; + let SB = StateTypes.SB; + + debug if(debug_channel.instantiation) D.print("setting migration type to 0"); + + + stable var migration_state: MigrationTypes.State = #v0_0_0(#data); + + debug if(debug_channel.instantiation) D.print("migrating"); + + // do not forget to change #v0_1_0 when you are adding a new migration + // if you use one previus states in place of #v0_1_0 it will run downgrade methods instead + migration_state := Migrations.migrate(migration_state, #v0_1_0(#id), {owner = __initargs.owner; storage_space = initial_storage}); + + + + + + /* + example migration + + switch(migration_state){ + case(#v0_1_1(val)){ + migration_state := Migrations.migrate(migration_state, #v0_1_0(#id), { owner = deployer.caller; storage_space = initial_storage }); + }; + case(_){}; + }; + + */ + + // do not forget to change #v0_1_0 when you are adding a new migration + let #v0_1_0(#data(state_current)) = migration_state; + + debug if(debug_channel.instantiation) D.print("done initing migration_state" # debug_show(state_current.collection_data.owner) # " " # debug_show(deployer.caller)); + + debug if(debug_channel.instantiation) D.print("initializing from " # debug_show((deployer, __initargs)) ); + + //used to get status of the canister and report it + stable var ic : Types.IC = actor("aaaaa-aa"); + + //upgrade storage for non-stable types + stable var nft_library_stable : [(Text,[(Text,CandyTypes.AddressedChunkArray)])] = []; + stable var access_tokens_stable : [(Text, Types.HttpAccess)] = []; + + //stores data for a library - unstable because it uses Candy Workspaces to hold active and maleable bits of data that can be manipulated in real time + private var nft_library : TrieMap.TrieMap> = NFTUtils.build_library(nft_library_stable); + + //store 
access tokens for owner assets to owner specific data + private var access_tokens : TrieMap.TrieMap = TrieMap.fromEntries(access_tokens_stable.vals(), Text.equal, Text.hash); + + //lets us get the principal of the host gateway canister + private var canister_principal : ?Principal = null; + private func get_canister(): Principal { + switch(canister_principal){ + case(null){ + canister_principal := ?Principal.fromActor(this); + Principal.fromActor(this); + }; + case(?val){ + val; + } + } + }; + + //lets us access state and pass it to other modulas + let get_state : () -> Types.State = func (){ + { + state = state_current; + canister = get_canister; + get_time = get_time; + nft_library = nft_library; + refresh_state = get_state; + access_tokens = access_tokens; + }; + }; + + //used for debugging + stable var __time_mode : {#test; #standard;} = #standard; + private var __test_time : Int = 0; + private func get_time() : Int{ + switch(__time_mode){ + case(#standard){return Time.now();}; + case(#test){return __test_time;}; + }; + + }; + + //data api - currently entire api nodes must be updated at one time + //in future releases more granular updates will be possible. + public shared (msg) func update_app_nft_origyn(request: Types.NFTUpdateRequest): async Result.Result{ + + NFTUtils.add_log(get_state(), { + event = "update_app_nft_origyn"; + timestamp = get_time(); + data = #Empty; + caller = ?msg.caller; + }); + return data.update_app_nft_origyn(request, get_state(), msg.caller); + }; + + //stages metadata for an NFT + public shared (msg) func stage_nft_origyn({metadata : CandyTypes.CandyValue}): async Result.Result{ + //nyi: if we run out of space, start putting data into child canisters + + NFTUtils.add_log(get_state(), { + event = "stage_nft_origyn"; + timestamp = get_time(); + data = #Empty; + caller = ?msg.caller; + }); + debug if(debug_channel.function_announce) D.print("in stage"); + return Mint.stage_nft_origyn(get_state(), metadata, msg.caller); + }; + + //allows staging multiple NFTs at the same time + public shared (msg) func stage_batch_nft_origyn(request : [{metadata: CandyTypes.CandyValue}]): async [Result.Result]{ + NFTUtils.add_log(get_state(), { + event = "stage_batch_nft_origyn"; + timestamp = get_time(); + data = #Empty; + caller = ?msg.caller; + }); + debug if(debug_channel.function_announce) D.print("in stage batch"); + if( NFTUtils.is_owner_manager_network(get_state(), msg.caller) == false){ + return [#err(Types.errors(#unauthorized_access, "market_transfer_batch_nft_origyn - not an owner, manager, or network", ?msg.caller))]; + }; + + + let results = Buffer.Buffer>(request.size()); + for(this_item in request.vals()){ + //nyi: should probably check for some spammy things and bail if too many errors + results.add(Mint.stage_nft_origyn(get_state(), this_item.metadata, msg.caller)); + }; + return results.toArray(); + + }; + + //stages a library. 
If the gateway is out of space a new bucket will be requested + //and the remote stage call will be made to send the chunk to the proper canister.Array + //creators can also send library metadata to update library info without the data + public shared (msg) func stage_library_nft_origyn(chunk : Types.StageChunkArg) : async Result.Result { + NFTUtils.add_log(get_state(), { + event = "stage_library_nft_origyn"; + timestamp = get_time(); + data = #Empty; + caller = ?msg.caller; + }); + debug if(debug_channel.function_announce) D.print("in stage library"); + switch(Mint.stage_library_nft_origyn( + get_state(), + chunk, + msg.caller)){ + case(#ok(stage_result)){ + switch(stage_result){ + case(#staged(canister)){ + return #ok({canister = canister}); + }; + case(#stage_remote(data)){ + debug if(debug_channel.storage) D.print("minting remote"); + return await Mint.stage_library_nft_origyn_remote( + get_state(), + chunk, + data.allocation, + data.metadata, + msg.caller); + }; + }; + }; + case(#err(err)){ + return #err(err); + }; + }; + }; + + //allows for batch library staging but this should only be used for collection or web based + //libraries that do not have actual file data. If a remote call is made then the cycle limit + //will be hit after a few cros canister calls + public shared (msg) func stage_library_batch_nft_origyn(chunks : [Types.StageChunkArg]) : async [Result.Result] { + //nyi: this needs to be gated to make sure the chunks don't contain file data. This should only be used for collection asset adding + + NFTUtils.add_log(get_state(), { + event = "stage_library_batch_nft_origyn"; + timestamp = get_time(); + data = #Empty; + caller = ?msg.caller; + }); + debug if(debug_channel.function_announce) D.print("in stage library batch"); + let results = Buffer.Buffer>(chunks.size()); + for(this_item in chunks.vals()){ + switch(Mint.stage_library_nft_origyn( + get_state(), + this_item, + msg.caller)){ + case(#ok(stage_result)){ + switch(stage_result){ + case(#staged(canister)){ + results.add( #ok({canister = canister})); + }; + case(#stage_remote(data)){ + debug if(debug_channel.storage) D.print("minting remote from batch. You are going to run out of cycles"); + results.add( await Mint.stage_library_nft_origyn_remote( + get_state(), + this_item, + data.allocation, + data.metadata, + msg.caller) + ); + }; + }; + }; + case(#err(err)){ + results.add(#err(err)); + }; + }; + }; + + return results.toArray(); + }; + + //mints an NFT and assigns it to the new owner. + public shared (msg) func mint_nft_origyn(token_id : Text, new_owner : Types.Account) : async Result.Result { + NFTUtils.add_log(get_state(), { + event = "mint_nft_origyn"; + timestamp = get_time(); + data = #Empty; + caller = ?msg.caller; + }); + debug if(debug_channel.function_announce) D.print("in mint"); + return await Mint.mint_nft_origyn(get_state(), token_id, new_owner, msg.caller); + + }; + + //allows minting of multiple items + public shared (msg) func mint_batch_nft_origyn(tokens: [(Text, Types.Account)]) : async [Result.Result] { + //this invovls an inter canister call and will not work well for multi canister collections. 
Test to figure out how many you can mint at a time; + NFTUtils.add_log(get_state(), { + event = "mint__batch_nft_origyn"; + timestamp = get_time(); + data = #Empty; + caller = ?msg.caller; + }); + if(NFTUtils.is_owner_manager_network(get_state(),msg.caller) == false){return [#err(Types.errors(#unauthorized_access, "mint_nft_origyn - not an owner", ?msg.caller))]}; + debug if(debug_channel.function_announce) D.print("in mint batch"); + let results = Buffer.Buffer>(tokens.size()); + + label search for(thisitem in tokens.vals()){ + results.add(await Mint.mint_nft_origyn(get_state(), thisitem.0, thisitem.1, msg.caller)) + }; + + return results.toArray(); + }; + + //allows an owner to transfer an NFT from one of their wallets to another + //warning: this feature will be updated in the future to give both wallets access to the NFT + //for some set period of time including access to assets beyond just the NFT ownership. It should not + //be used with a wallet that you do not 100% trust to not take the NFT back. It is meant for + //internal accounting only. Use market_transfer_nft_origyn instead + public shared (msg) func share_wallet_nft_origyn(request : Types.ShareWalletRequest) : async Result.Result { + NFTUtils.add_log(get_state(), { + event = "share_wallet_nft_origyn"; + timestamp = get_time(); + data = #Empty; + caller = ?msg.caller; + }); + debug if(debug_channel.function_announce) D.print("in share wallet"); + return Owner.share_wallet_nft_origyn(get_state(), request, msg.caller); + }; + + public shared (msg) func governance_nft_origyn(request : Types.GovernanceRequest) : async Result.Result { + NFTUtils.add_log(get_state(), { + event = "governance_nft_origyn"; + timestamp = get_time(); + data = #Empty; + caller = ?msg.caller; + }); + debug if(debug_channel.function_announce) D.print("in owner governance"); + return Governance.governance_nft_origyn(get_state(), request, msg.caller); + }; + + //dip721 transferFrom + public shared (msg) func transferFromDip721(from: Principal, to: Principal, tokenAsNat: Nat) : async DIP721.Result{ + NFTUtils.add_log(get_state(), { + event = "transferFromDip721"; + timestamp = get_time(); + data = #Empty; + caller = ?msg.caller; + }); + debug if(debug_channel.function_announce) D.print("in transferFromDip721"); + //existing escrow acts as approval + if(msg.caller != to){ + return #Err(#UnauthorizedOperator); + }; + return await Owner.transferDip721(get_state(),from, to, tokenAsNat, msg.caller); + }; + + //dip721 transfer + public shared (msg) func transferDip721(to: Principal, tokenAsNat: Nat) : async DIP721.Result{ + NFTUtils.add_log(get_state(), { + event = "transferDip721"; + timestamp = get_time(); + data = #Empty; + caller = ?msg.caller; + }); + debug if(debug_channel.function_announce) D.print("in transferFromDip721"); + //existing escrow acts as approval + return await Owner.transferDip721(get_state(),msg.caller, to, tokenAsNat, msg.caller); + }; + + //dip721 transferFrom "v2" downgrade + public shared (msg) func transferFrom(from: Principal, to: Principal, tokenAsNat: Nat) : async DIP721.Result{ + NFTUtils.add_log(get_state(), { + event = "transferFrom"; + timestamp = get_time(); + data = #Empty; + caller = ?msg.caller; + }); + debug if(debug_channel.function_announce) D.print("in transferFrom"); + if(msg.caller != to){ + return #Err(#UnauthorizedOperator); + }; + //existing escrow acts as approval + return await Owner.transferDip721(get_state(),from, to, tokenAsNat, msg.caller); + }; + + + //EXT transferFrom + public shared (msg) func 
transferEXT(request: EXT.TransferRequest) : async EXT.TransferResponse{ + NFTUtils.add_log(get_state(), { + event = "transferEXT"; + timestamp = get_time(); + data = #Empty; + caller = ?msg.caller; + }); + debug if(debug_channel.function_announce) D.print("in transfer ext"); + //existing escrow is approval + return await Owner.transferExt(get_state(), request, msg.caller); + }; + + //EXT transferFrom legacy + public shared (msg) func transfer(request: EXT.TransferRequest) : async EXT.TransferResponse{ + NFTUtils.add_log(get_state(), { + event = "transfer"; + timestamp = get_time(); + data = #Empty; + caller = ?msg.caller; + }); + debug if(debug_channel.function_announce) D.print("in transfer"); + //existing escrow is approval + return await Owner.transferExt(get_state(), request, msg.caller); + }; + + + //allows the market based transfer of NFTs + public shared (msg) func market_transfer_nft_origyn(request : Types.MarketTransferRequest) : async Result.Result { + NFTUtils.add_log(get_state(), { + event = "market_transfer_nft_origyn"; + timestamp = get_time(); + data = #Empty; + caller = ?msg.caller; + }); + debug if(debug_channel.function_announce) D.print("in market transfer"); + + return switch(request.sales_config.pricing){ + case(#instant(item)){ + return await Market.market_transfer_nft_origyn_async(get_state(), request, msg.caller); + }; + case(_){ + return Market.market_transfer_nft_origyn(get_state(), request, msg.caller); + } + }; + }; + + //start a large number of sales/market transfers. Currently limited to owners, managers, or the network + public shared (msg) func market_transfer_batch_nft_origyn(request : [Types.MarketTransferRequest]) : async [Result.Result] { + //nyi: for now limit this to managers + NFTUtils.add_log(get_state(), { + event = "market_transfer_batch_nft_origyn"; + timestamp = get_time(); + data = #Empty; + caller = ?msg.caller; + }); + + debug if(debug_channel.function_announce) D.print("in market transfer batch"); + if( NFTUtils.is_owner_manager_network(get_state(), msg.caller) == false){ + return [#err(Types.errors(#unauthorized_access, "market_transfer_batch_nft_origyn - not an owner, manager, or network", ?msg.caller))]; + }; + + + let results = Buffer.Buffer>(request.size()); + + for(this_item in request.vals()){ + //nyi: should probably check for some spammy things and bail if too many errors + switch(this_item.sales_config.pricing){ + case(#instant(item)){ + results.add(await Market.market_transfer_nft_origyn_async(get_state(), this_item, msg.caller)); + }; + case(_){ + results.add(Market.market_transfer_nft_origyn(get_state(), this_item, msg.caller)); + }; + }; + }; + //D.print("made it"); + return results.toArray(); + }; + + + //allows a user to do a number of functions around an NFT sale including ending a sale, opening a sale, depositing an escrow + //refresh_offers, bidding in an auction, withdrawing funds from an escrow or sale + public shared (msg) func sale_nft_origyn(request: Types.ManageSaleRequest) : async Result.Result{ + NFTUtils.add_log(get_state(), { + event = "sale_nft_origyn"; + timestamp = get_time(); + data = #Empty; + caller = ?msg.caller; + }); + debug if(debug_channel.function_announce) D.print("in sale_nft_origyn"); + + return switch(request){ + case(#end_sale(val)){await Market.end_sale_nft_origyn(get_state(), val, msg.caller)}; + case(#open_sale(val)){Market.open_sale_nft_origyn(get_state(), val, msg.caller)}; + case(#escrow_deposit(val)){return switch(await Market.escrow_nft_origyn(get_state(), val, msg.caller)){ + 
case(#ok(val)){#ok(#escrow_deposit(val))}; + case(#err(err)){#err(err)}; + }}; + case(#refresh_offers(val)){ + Market.refresh_offers_nft_origyn(get_state(), val, msg.caller); + }; + case(#bid(val)){ + return switch(await Market.bid_nft_origyn(get_state(), val, msg.caller)){ + case(#ok(val)){#ok(#bid(val))}; + case(#err(err)){#err(err)}; + } + + }; + case(#withdraw(val)){ + D.print("in withdrawl"); + return switch(await Market.withdraw_nft_origyn(get_state(), val, msg.caller)){ + case(#ok(val)){#ok(#withdraw(val))}; + case(#err(err)){#err(err)}; + } + + }; + }; + + }; + + //allows batch operations + public shared (msg) func sale_batch_nft_origyn(requests: [Types.ManageSaleRequest]) : async [Result.Result]{ + + debug if(debug_channel.function_announce) D.print("in sale_nft_origyn batch"); + if( NFTUtils.is_owner_manager_network(get_state(), msg.caller) == false){ + return [#err(Types.errors(#unauthorized_access, "sale_batch_nft_origyn - not an owner, manager, or network - batch not supported", ?msg.caller))]; + }; + NFTUtils.add_log(get_state(), { + event = "sale_batch_nft_origyn"; + timestamp = get_time(); + data = #Empty; + caller = ?msg.caller; + }); + + + let result = Buffer.Buffer>(requests.size()); + for(this_item in requests.vals()){ + result.add(switch(this_item){ + //NOTE: this causes a commit and could over run the cycle limit. We may need to refactor to + // an end and then distribute pattern...or collect needed transfers and batch them. + case(#end_sale(val)){await Market.end_sale_nft_origyn(get_state(), val, msg.caller)}; + case(#open_sale(val)){Market.open_sale_nft_origyn(get_state(), val, msg.caller)}; + case(#escrow_deposit(val)){ switch(await Market.escrow_nft_origyn(get_state(), val, msg.caller)){ + case(#ok(val)){#ok(#escrow_deposit(val))}; + case(#err(err)){#err(err)}; + } + }; + case(#refresh_offers(val)){ + Market.refresh_offers_nft_origyn(get_state(), val, msg.caller); + }; + case(#bid(val)){ + switch(await Market.bid_nft_origyn(get_state(), val, msg.caller)){ + case(#ok(val)){#ok(#bid(val))}; + case(#err(err)){#err(err)}; + } + + }; + case(#withdraw(val)){ + switch(await Market.withdraw_nft_origyn(get_state(), val, msg.caller)){ + case(#ok(val)){#ok(#withdraw(val))}; + case(#err(err)){#err(err)}; + } + + }; + }); + }; + return result.toArray(); + + }; + + + private func _sale_info_nft_origyn(request: Types.SaleInfoRequest, caller: Principal) : Result.Result{ + return switch(request){ + case(#status(val)){Market.sale_status_nft_origyn(get_state(), val, caller)}; + case(#active(val)){Market.active_sales_nft_origyn(get_state(), val, caller)}; + case(#history(val)){Market.history_sales_nft_origyn(get_state(), val, caller)}; + case(#deposit_info(val)){Market.deposit_info_nft_origyn(get_state(), val, caller)}; + }; + }; + + + //allows for the retrieving of sale info + public query (msg) func sale_info_nft_origyn(request: Types.SaleInfoRequest) : async Result.Result{ + debug if(debug_channel.function_announce)D.print("in sale_info_nft_origyn"); + return _sale_info_nft_origyn(request, msg.caller); + + }; + + //get sale info in a secure manner + public shared(msg) func sale_info_secure_nft_origyn(request: Types.SaleInfoRequest) : async Result.Result{ + NFTUtils.add_log(get_state(), { + event = "sale_nft_secure_origyn"; + timestamp = get_time(); + data = #Empty; + caller = ?msg.caller; + }); + debug if(debug_channel.function_announce) D.print("in sale info secure"); + return _sale_info_nft_origyn(request, msg.caller); + }; + + //batch info + public query (msg) func 
sale_info_batch_nft_origyn(requests: [Types.SaleInfoRequest]) : async [Result.Result]{ + debug if(debug_channel.function_announce) D.print("in sale info batch"); + let result = Buffer.Buffer>(requests.size()); + for(this_item in requests.vals()){ + result.add(_sale_info_nft_origyn(this_item, msg.caller)); + }; + return result.toArray(); + + }; + + //batch info secure + public shared (msg) func sale_info_batch_secure_nft_origyn(requests: [Types.SaleInfoRequest]) : async [Result.Result]{ + NFTUtils.add_log(get_state(), { + event = "sale_batch_secure_nft_origyn"; + timestamp = get_time(); + data = #Empty; + caller = ?msg.caller; + }); + debug if(debug_channel.function_announce) D.print("in sale info batch secure"); + let result = Buffer.Buffer>(requests.size()); + for(this_item in requests.vals()){ + result.add(_sale_info_nft_origyn(this_item, msg.caller)); + }; + return result.toArray(); + }; + + //allows an owner to update information about a collection + public shared (msg) func collection_update_nft_origyn(request : Types.ManageCollectionCommand) : async Result.Result{ + NFTUtils.add_log(get_state(), { + event = "collection_update_origyn"; + timestamp = get_time(); + data = #Empty; + caller = ?msg.caller; + }); + debug if(debug_channel.function_announce) D.print("in collection_update_nft_origyn"); + return Metadata.collection_update_nft_origyn(get_state(), request, msg.caller); + }; + + + //batch access + public shared (msg) func collection_update_batch_nft_origyn(requests : [Types.ManageCollectionCommand]) : async [Result.Result]{ + NFTUtils.add_log(get_state(), { + event = "collection_update_batch_nft_"; + timestamp = get_time(); + data = #Empty; + caller = ?msg.caller; + }); + debug if(debug_channel.function_announce) D.print("in collection_update_batch_nft_origyn"); + + //we do a first check of caller to avoid cycle drain + if(NFTUtils.is_owner_network(get_state(), msg.caller) == false){ + return [#err(Types.errors(#unauthorized_access, "collection_update_batch_nft_ - not a canister owner or network", ?msg.caller))]; + }; + + let results = Buffer.Buffer>(requests.size()); + for(this_item in requests.vals()){ + results.add(Metadata.collection_update_nft_origyn(get_state(), this_item, msg.caller)); + }; + + return results.toArray(); + }; + + + //debug function + public shared (msg) func __advance_time(new_time: Int) : async Int { + //nyi: Maybe only the network should be able to do this + if(msg.caller != state_current.collection_data.owner){ + throw Error.reject("not owner"); + }; + __test_time := new_time; + return __test_time; + + }; + + //debug function + public shared (msg) func __set_time_mode(newMode: {#test; #standard;}) : async Bool { + //nyi: Maybe only the network should be able to do this + if(msg.caller != state_current.collection_data.owner){ + throw Error.reject("not owner"); + }; + __time_mode := newMode; + return true; + }; + + //allows the owner to manage the storage on their NFT + public shared (msg) func manage_storage_nft_origyn(request : Types.ManageStorageRequest) : async Result.Result{ + if(NFTUtils.is_owner_network(get_state(), msg.caller) == false){ + throw Error.reject("not owner or network"); + }; + debug if(debug_channel.function_announce) D.print("in collection_update_batch_nft_origyn"); + NFTUtils.add_log(get_state(), { + event = "manage_storage_nft_origyn"; + timestamp = get_time(); + data = #Empty; + caller = ?msg.caller; + }); + + let state = get_state(); + + switch(request){ + case(#add_storage_canisters(request)){ + for(this_item in request.vals()){ + 
//make sure that if this exists we re allocate or error + switch(Map.get(state.state.buckets, Map.phash, this_item.0)){ + case(null){}; + case(?val){ + //eventually we can accomidate reallocation, but fail for now + return #err(Types.errors(#storage_configuration_error, "manage_storage_nft_origyn - principal already exists in buckets " # debug_show(this_item), ?msg.caller)); + + }; + }; + + Map.set(state.state.buckets, Map.phash, this_item.0, { + principal = this_item.0; + var allocated_space = this_item.1; + var available_space = this_item.1; + date_added = get_time(); + b_gateway = false; + var version = this_item.2; + var allocations = Map.new<(Text,Text), Int>(); + + }); + state.state.collection_data.allocated_storage += this_item.1; + state.state.collection_data.available_space += this_item.1; + }; + return #ok(#add_storage_canisters(state.state.collection_data.allocated_storage, + state.state.collection_data.available_space)); + }; + }; + + return #err(Types.errors(#nyi, "manage_storage_nft_origyn nyi ", ?msg.caller)); + + }; + + + + //[Text, ?Nat, ?Nat] for pagination + public query (msg) func collection_nft_origyn(fields : ?[(Text,?Nat, ?Nat)]) : async Result.Result{ + //warning: this functiondoes not use msg.caller, if you add it you need to fix the secure query + debug if(debug_channel.function_announce) D.print("in collection_nft_origyn"); + + let state = get_state(); + let keys = Iter.toArray(Iter.filter(Map.keys(state.state.nft_ledgers), func (x : Text){ x != ""})); //should always have the "" item and need to remove it + let multi_canister = Iter.toArray(Map.keys(state.state.buckets)); + + + return #ok({ + fields = fields; + logo = state.state.collection_data.logo; + name = state.state.collection_data.name; + symbol = state.state.collection_data.symbol; + total_supply = ?keys.size(); + owner = ?state.state.collection_data.owner; + managers = ?state.state.collection_data.managers; + network = state.state.collection_data.network; + token_ids = ?keys; + token_ids_count = ?keys.size(); + multi_canister = ?multi_canister; + multi_canister_count = ?multi_canister.size(); + metadata = Map.get(state.state.nft_metadata, Map.thash, ""); + allocated_storage = ?state.state.collection_data.allocated_storage; + available_space = ?state.state.collection_data.available_space; + } + ); + + + }; + + //secure access to collection information + + public shared (msg) func collection_secure_nft_origyn(fields : ?[(Text,?Nat, ?Nat)]) : async Result.Result{ + NFTUtils.add_log(get_state(), { + event = "collection_secure_nft_origyn"; + timestamp = get_time(); + data = #Empty; + caller = ?msg.caller; + }); + + debug if(debug_channel.function_announce) D.print("in collection_secure_nft_origyn"); + + return await collection_nft_origyn(fields); + }; + + //allows users to see token information + public query (msg) func history_nft_origyn(token_id : Text, start: ?Nat, end: ?Nat) : async Result.Result<[Types.TransactionRecord],Types.OrigynError> { + //warning: this func does not use msg.caller. 
If you decide to use it, fix the secure caller + debug if(debug_channel.function_announce) D.print("in collection_secure_nft_origyn"); + + + let ledger = switch(Map.get(state_current.nft_ledgers, Map.thash, token_id)){ + case(null){ + return #ok([]); + }; + case(?val){ + var thisStart = 0; + var thisEnd = Nat.sub(SB.size(val),1); + switch(start, end){ + case(?start, ?end){ + thisStart := start; + thisEnd := end; + }; + case(?start, null){ + thisStart := start; + }; + case(null, ?end){ + thisEnd := end; + }; + case(null, null){}; + }; + + if(thisEnd >= thisStart){ + + let result = Buffer.Buffer((thisEnd + 1) - thisStart); + for(this_item in Iter.range(thisStart, thisEnd)){ + result.add(switch(SB.getOpt(val, this_item)){case(?item){item};case(null){ + return #err(Types.errors(#asset_mismatch, "history_nft_origyn - index out of range " # debug_show(this_item) # " " # debug_show(SB.size(val)), ?msg.caller)); + + }}); + }; + + return #ok(result.toArray()); + } else { + //enable revrange + return #err(Types.errors(#nyi, "history_nft_origyn - rev range nyi " # debug_show(thisStart) # " " # debug_show(thisEnd), ?msg.caller)); + }; + }; + }; + }; + + //secure access to token history + public shared (msg) func history_secure_nft_origyn(token_id : Text, start: ?Nat, end: ?Nat) : async Result.Result<[Types.TransactionRecord],Types.OrigynError> { + debug if(debug_channel.function_announce) D.print("in history_secure_nft_origyn"); + NFTUtils.add_log(get_state(), { + event = "history_secure_nft_origyn"; + timestamp = get_time(); + data = #Empty; + caller = ?msg.caller; + }); + + + return await history_nft_origyn(token_id, start,end); + }; + + + + + //dip721 balance + public query(msg) func balanceOfDip721(user: Principal) : async Nat{ + debug if(debug_channel.function_announce) D.print("in balanceOfDip721"); + + return (Metadata.get_NFTs_for_user(get_state(), #principal(user))).size(); + }; + + //dip721 balance + public query(msg) func balance(request: EXT.BalanceRequest) : async EXT.BalanceResponse{ //legacy ext + debug if(debug_channel.function_announce) D.print("in balance"); + + return _getEXTBalance(request); + }; + + //ext balance + public query(msg) func balanceEXT(request: EXT.BalanceRequest) : async EXT.BalanceResponse { + debug if(debug_channel.function_announce) D.print("in balanceEXT"); + + return _getEXTBalance(request); + }; + + private func _getEXTBalance(request: EXT.BalanceRequest) : EXT.BalanceResponse{ + let thisCollection = Metadata.get_NFTs_for_user(get_state(), switch(request.user){ + case(#address(data)){ + #account_id(data); + }; + case(#principal(data)){ + #principal(data); + }; + } + ); + for(this_item in thisCollection.vals()){ + if(_getEXTTokenIdentifier(this_item) == request.token){ + return #ok(1: Nat); + } + }; + return #ok(0: Nat); + }; + + //converts a token id into a reversable ext token id + private func _getEXTTokenIdentifier(token_id: Text) : Text{ + let tds : [Nat8] = [10, 116, 105, 100]; //b"\x0Atid" + let theID = Array.append( + Array.append(tds, Blob.toArray(Principal.toBlob(Principal.fromActor(this)))), + Conversions.valueToBytes(#Nat32(Text.hash(token_id)))); + + return Principal.toText(Principal.fromBlob(Blob.fromArray(theID))); + }; + + //lets users query for a token id + public query(msg) func getEXTTokenIdentifier(token_id: Text) : async Text{ + debug if(debug_channel.function_announce) D.print("in getEXTTokenIdentifier"); + return _getEXTTokenIdentifier(token_id); + }; + + private func _balance_of_nft_origyn(account: Types.Account, caller: Principal) : 
Result.Result { + + + debug if(debug_channel.function_announce) D.print("in balance_of_nft_origyn"); + + let state = get_state(); + + //get escrows + let escrows = Map.get(state_current.escrow_balances, Types.account_handler, account); + let escrowResults = Buffer.Buffer(1); + + let sales = Map.get(state_current.sales_balances, Types.account_handler, account); + let salesResults = Buffer.Buffer(1); + + let nft_results = Buffer.Buffer(1); + + let offers = Map.get>(state.state.offers, Types.account_handler, account); + let offer_results = Buffer.Buffer(1); + + //nyi: check the mint status and compare to msg.caller + //nyi: indexing of NFTs, Escrows, Sales, Offers if this is a performance drain + for(this_nft in Map.entries(state.state.nft_metadata)){ + switch(Metadata.is_nft_owner(this_nft.1, account)){ + case(#ok(val)){ + if(val == true and this_nft.0 != ""){ + nft_results.add(this_nft.0); + }; + }; + case(_){}; + }; + + }; + + + switch(escrows) + { + case(null){}; + case(?this_buyer){ + Iter.iterate(Map.vals(this_buyer), func(thisSeller, x){ + Iter.iterate(Map.vals(thisSeller), func(this_token_id, x){ + Iter.iterate(Map.vals(this_token_id), func(this_ledger, x){ + escrowResults.add(this_ledger); + }); + }); + }); + }; + }; + + switch(sales) + { + case(null){}; + case(?thisSeller){ + Iter.iterate(Map.vals(thisSeller), func(this_buyer, x){ + Iter.iterate(Map.vals(this_buyer), func(this_token_id, x){ + Iter.iterate(Map.vals(this_token_id), func(this_ledger, x){ + salesResults.add(this_ledger); + }); + }); + }); + }; + }; + + + switch(offers){ + case(null){}; + case(?found_offer){ + for(this_buyer in Map.entries(found_offer)){ + switch(Map.get(state_current.escrow_balances, Types.account_handler, this_buyer.0)){ + case(null){}; + case(?found_buyer){ + switch(Map.get(found_buyer, Types.account_handler, account)){ + case(null){}; + case(?found_seller){ + for(this_token in Map.entries(found_seller)){ + for(this_ledger in Map.entries(this_token.1)){ + offer_results.add(this_ledger.1); + }; + }; + }; + }; + }; + }; + }; + }; + }; + + return #ok{ + multi_canister = null; //nyi + nfts = nft_results.toArray(); + escrow = escrowResults.toArray(); + sales = salesResults.toArray(); + stake = []; + offers = offer_results.toArray(); + }; + }; + + + //lets a user query the balances for their nfts, escrows, sales, offers, and stakes + public query(msg) func balance_of_nft_origyn(account: Types.Account) : async Result.Result{ + + return _balance_of_nft_origyn(account, msg.caller); + }; + + + + //allows secure access to balance + public shared(msg) func balance_of_secure_nft_origyn(account: Types.Account) : async Result.Result{ + return _balance_of_nft_origyn(account, msg.caller); + }; + + + private func _bearer_of_nft_origyn(token_id : Text, caller: Principal) : Result.Result{ + let foundVal = switch( + Metadata.get_nft_owner( + switch(Metadata.get_metadata_for_token(get_state(),token_id, caller, null, state_current.collection_data.owner)){ + case(#err(err)){ + return #err(Types.errors(#token_not_found, "bearer_nft_origyn " # err.flag_point, ?caller)); + }; + case(#ok(val)){ + val; + }; + })){ + case(#err(err)){ + return #err(Types.errors(err.error, "bearer_nft_origyn " # err.flag_point, ?caller)); + }; + case(#ok(val)){ + return #ok(val); + }; + }; + }; + + //returns the owner of the NFT indicated by token_id + public query (msg) func bearer_nft_origyn(token_id : Text) : async Result.Result{ + debug if(debug_channel.function_announce) D.print("in bearer_nft_origyn"); + return _bearer_of_nft_origyn(token_id, 
msg.caller); + + }; + + //secure access to bearer + public shared (msg) func bearer_secure_nft_origyn(token_id : Text) : async Result.Result{ + debug if(debug_channel.function_announce) D.print("in bearer_secure_nft_origyn"); + + return _bearer_of_nft_origyn(token_id, msg.caller); + }; + + //provides access to searching a large number of bearers at one time + //nyi: could expose items not minted. add mint/owner check + public query (msg) func bearer_batch_nft_origyn(tokens : [Text]) : async [Result.Result]{ + debug if(debug_channel.function_announce) D.print("in bearer_secure_nft_origyn"); + + let results = Buffer.Buffer>(tokens.size()); + label search for(thisitem in tokens.vals()){ + results.add( _bearer_of_nft_origyn(thisitem, msg.caller)); + + }; + return results.toArray(); + }; + + // secure access to bearer batch + public shared (msg) func bearer_batch_secure_nft_origyn(tokens : [Text]) : async [Result.Result]{ + debug if(debug_channel.function_announce) D.print("in bearer_batch_secure_nft_origyn"); + + let results = Buffer.Buffer>(tokens.size()); + label search for(thisitem in tokens.vals()){ + results.add( _bearer_of_nft_origyn(thisitem, msg.caller)); + + }; + return results.toArray(); + }; + + //conversts a token id to a Nat for use in dip721 + public query(msg) func get_token_id_as_nat_origyn(token_id : Text) : async Nat { + debug if(debug_channel.function_announce) D.print("in get_token_id_as_nat_origyn"); + + return NFTUtils.get_token_id_as_nat(token_id); + }; + + //converts a nat to an token_id for Nat + public query(msg) func get_nat_as_token_id_origyn(tokenAsNat : Nat) : async Text { + debug if(debug_channel.function_announce) D.print("in get_nat_as_token_id_origyn"); + + NFTUtils.get_nat_as_token_id(tokenAsNat) + }; + + private func _ownerOfDip721 (tokenAsNat: Nat, caller: Principal) : DIP721.OwnerOfResponse{ + let foundVal = switch( + Metadata.get_nft_owner( + switch(Metadata.get_metadata_for_token(get_state(), + NFTUtils.get_nat_as_token_id(tokenAsNat) + , caller, null, state_current.collection_data.owner)){ + case(#err(err)){ + return #Err(#TokenNotFound); + }; + case(#ok(val)){ + val; + }; + })){ + case(#err(err)){ + return #Err(#Other("ownerOf " # err.flag_point)); + }; + case(#ok(val)){ + switch(val){ + case(#principal(data)){ + return #Ok(?data); + }; + case(_){ + return #Err(#Other("ownerOf unsupported owner type by DIP721" # debug_show(val))); + } + } + }; + }; + }; + + // owner of dip721 + public query(msg) func ownerOfDIP721(tokenAsNat: Nat) : async DIP721.OwnerOfResponse{ + debug if(debug_channel.function_announce) D.print("in ownerOfDIP721"); + + return _ownerOfDip721(tokenAsNat, msg.caller); + }; + + //for dip721 "v2" downgrade in usability + public query(msg) func ownerOf(tokenAsNat: Nat) : async DIP721.OwnerOfResponse{ + debug if(debug_channel.function_announce) D.print("in ownerOf"); + + return _ownerOfDip721(tokenAsNat, msg.caller); + }; + + //supports EXT Bearer + public query(msg) func bearerEXT(tokenIdentifier: EXT.TokenIdentifier) : async Result.Result{ + debug if(debug_channel.function_announce) D.print("in bearerEXT"); + + return Owner.bearerEXT(get_state(), tokenIdentifier, msg.caller); + }; + + //supports EXT Bearer legacy + public query(msg) func bearer(tokenIdentifier: EXT.TokenIdentifier) : async Result.Result{ + debug if(debug_channel.function_announce) D.print("in bearer"); + + return Owner.bearerEXT(get_state(), tokenIdentifier, msg.caller); + }; + + private func _nft_origyn(token_id : Text, caller: Principal) : Result.Result{ + 
//D.print("Calling NFT_Origyn"); + var metadata = switch(Metadata.get_metadata_for_token(get_state(),token_id, caller, null, state_current.collection_data.owner)){ + case(#err(err)){ + return #err(err); + }; + case(#ok(val)){ + val; + }; + }; + + let final_object = Metadata.get_clean_metadata(metadata, caller); + + //identify a current sale + let current_sale : ?Types.SaleStatusStable = switch(Metadata.get_current_sale_id(metadata)){ + case(#Empty){null}; + case(#Text(val)){ + do ? {Types.SalesStatus_stabalize_for_xfer(Map.get(state_current.nft_sales, Map.thash,val)!)}; + }; + case(_){ + //should be an error + null}; + }; + return(#ok({ + current_sale = current_sale; + metadata= final_object;} + )); + + return #ok({current_sale= null; metadata = #Empty;}); + }; + + + //returns metadata about an NFT + public query (msg) func nft_origyn(token_id : Text) : async Result.Result{ + + debug if(debug_channel.function_announce) D.print("in nft_origyn"); + + return _nft_origyn(token_id, msg.caller); + }; + + //secure access to nft_origyn + public shared (msg) func nft_secure_origyn(token_id : Text) : async Result.Result{ + debug if(debug_channel.function_announce) D.print("in nft_secure_origyn"); + + return _nft_origyn(token_id, msg.caller); + }; + + //batch access to nft metadata + public query (msg) func nft_batch_origyn(token_ids : [Text]) : async [Result.Result]{ + debug if(debug_channel.function_announce) D.print("in nft_batch_origyn"); + + let results = Buffer.Buffer>(token_ids.size()); + label search for(thisitem in token_ids.vals()){ + + results.add(_nft_origyn(thisitem, msg.caller)); + }; + + return results.toArray(); + }; + + public shared (msg) func nft_batch_secure_origyn(token_ids : [Text]) : async [Result.Result]{ + debug if(debug_channel.function_announce) D.print("in nft_batch_secure_origyn"); + + let results = Buffer.Buffer>(token_ids.size()); + label search for(thisitem in token_ids.vals()){ + + results.add( _nft_origyn(thisitem, msg.caller)); + }; + + return results.toArray(); + }; + + + //pull a chunk of an nft library + //the IC can only pull back ~2MB per request. 
This allows reading an entire library file by a user or canister + public query (msg) func chunk_nft_origyn(request : Types.ChunkRequest) : async Result.Result{ + //D.print("looking for a chunk" # debug_show(request)); + //check mint property + debug if(debug_channel.function_announce) D.print("in chunk_nft_origyn"); + + return Metadata.chunk_nft_origyn(get_state(), request, ?msg.caller); + }; + + //secure access to chunks + public shared (msg) func chunk_secure_nft_origyn(request : Types.ChunkRequest) : async Result.Result{ + debug if(debug_channel.function_announce) D.print("in chunk_secure_nft_origyn"); + + return Metadata.chunk_nft_origyn(get_state(), request, ?msg.caller); + }; + + //cleans access keys + private func clearAccessKeysExpired() { + let max_size = 20000; + if(access_tokens.size() > max_size) { + Iter.iterate(access_tokens.keys(), func(key, _index) { + switch(access_tokens.get(key)){ + case(null){}; + case(?item){ + if(item.expires < get_time()){ + access_tokens.delete(key); + } + } + } + }); + }; + }; + + let access_expiration = (1000 * 360 * (1_000_000)); //360s + + //registers a principal with a access key so a user can use that key to make http queries + public shared(msg) func http_access_key(): async Result.Result { + debug if(debug_channel.function_announce) D.print("in http_access_key"); + + //nyi: spam prevention + if(Principal.isAnonymous(msg.caller) ){return #err(Types.errors(#unauthorized_access, "http_access_key - anon not allowed", ?msg.caller))}; + + clearAccessKeysExpired(); + + let access_key = (await http.gen_access_key()) # Nat32.toText(Text.hash(debug_show(msg.caller, Time.now()))); + + access_tokens.put(access_key, { + identity = msg.caller; + expires = Time.now() + access_expiration; + }); + + #ok(access_key); + }; + + //gets an access key for a user + public query(msg) func get_access_key(): async Result.Result { + debug if(debug_channel.function_announce) D.print("in get_access_key"); + //optimization: use a Map + for((key, info) in access_tokens.entries()){ + if(Principal.equal(info.identity, msg.caller)) { + return #ok(key); + }; + }; + + #err(Types.errors(#property_not_found, "access key not found by caller", ?msg.caller)); + }; + + //handles http requets + public query(msg) func http_request(rawReq: Types.HttpRequest): async (http.HTTPResponse) { + debug if(debug_channel.function_announce) D.print("in http_request"); + + return http.http_request(get_state(), rawReq, msg.caller); + }; + + // A streaming callback based on NFTs. Returns {[], null} if the token can not be found. + // Expects a key of the following pattern: "nft/{key}". 
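+    // Background: a single HTTP response from the canister is capped at roughly 2MB, so larger
+    // library assets are returned as a first chunk plus a streaming strategy. The HTTP gateway then
+    // keeps calling this callback with the token it was handed; each call returns the next chunk
+    // and either a follow-up token or none once the asset has been fully streamed.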
+ public query func nftStreamingCallback(tk : http.StreamingCallbackToken) : async http.StreamingCallbackResponse { + debug if(debug_channel.streaming) D.print("The nftstreamingCallback " # debug_show(debug_show(tk))); + debug if(debug_channel.function_announce) D.print("in chunk_nft_origyn"); + + + return http.nftStreamingCallback(tk, get_state()); + }; + + //handles streaming + public query func http_request_streaming_callback( + tk : http.StreamingCallbackToken + ) : async http.StreamingCallbackResponse { + return http.http_request_streaming_callback(tk, get_state()); + }; + + //lets a user see who they are + public query (msg) func whoami(): async (Principal) { msg.caller }; + + //returns the status of the gateway canister + public shared func canister_status(request: { canister_id: Types.canister_id }): async Types.canister_status { + await ic.canister_status(request) + }; + + //reports cylces + public query func cycles(): async Nat { + Cycles.balance() + }; + + //returns storage metrics for this server + public query func storage_info_nft_origyn() : async Result.Result{ + //warning: this func does not use msg.caller. If that changes, fix secure query + debug if(debug_channel.function_announce) D.print("in storage_info_nft_origyn"); + + let state = get_state(); + return #ok({ + allocated_storage = state.state.canister_allocated_storage; + available_space = state.state.canister_availible_space; + allocations = Iter.toArray(Iter.map(Map.vals<(Text,Text),Types.AllocationRecord>(state.state.allocations),Types.allocation_record_stabalize)); + }); + }; + + //secure access to storage inf + public shared(msg) func storage_info_secure_nft_origyn() : async Result.Result{ + debug if(debug_channel.function_announce) D.print("in storage_info_secure_nft_origyn"); + return await storage_info_nft_origyn(); + }; + + + //metadata for ext + public query func metadata(token : EXT.TokenIdentifier) : async Result.Result{ + debug if(debug_channel.function_announce) D.print("in metadata"); + + let token_id = switch(Owner.getNFTForTokenIdentifier(get_state(), token)){ + case(#ok(data)){ + data + }; + case(#err(err)){ + return #err(#InvalidToken(token)); + }; + }; + + return #ok(#nonfungible({ + metadata = ?Text.encodeUtf8("https://exos.origyn.network/-/" # Principal.toText(get_canister()) # "/-/" # token_id) + })); + }; + + + // set the `log_harvester` + public shared (msg) func set_log_harvester_id(_id: Principal): async () { + let state = get_state(); + if(msg.caller != state.state.collection_data.owner) { throw Error.reject("not owner")}; + + NFTUtils.add_log(get_state(), { + event = "set_log_harvester_id"; + timestamp = get_time(); + data = #Principal(_id); + caller = ?msg.caller; + }); + state.state.log_harvester := _id; + }; + + // get the last pages number of logs and burns them + public shared(msg) func harvest_log(pages : Nat) : async [[Types.LogEntry]]{ + assert(pages > 0); + let state = get_state(); + if(msg.caller != state.state.log_harvester) { + throw Error.reject("not the log harvester"); + }; + let result = Buffer.Buffer<[Types.LogEntry]>(pages); + for(thisRound in Iter.range(0, pages-1)){ + let chunk = SB.removeLast(state.state.log_history); + switch(chunk){ + case(null){}; + case(?v){ + result.add(v); + }; + }; + }; + return result.toArray(); + }; + + //destroys the log + public shared(msg) func nuke_log() : async (){ + let state = get_state(); + if(msg.caller != state.state.log_harvester) { + throw Error.reject("not the log harvester"); + }; + state.state.log_history := 
SB.initPresized<[Types.LogEntry]>(1); + }; + + //log info + public query(msg) func log_history_size() : async Nat{ + let state = get_state(); + if(msg.caller != state.state.collection_data.owner and msg.caller != state.state.log_harvester ) { + throw Error.reject("no log rights"); + }; + return SB.size( state.state.log_history); + }; + + //look a specific page of log history + public query(msg) func log_history_page(i : Nat) : async [Types.LogEntry]{ + let state = get_state(); + if(msg.caller != state.state.collection_data.owner and msg.caller != state.state.log_harvester ) { + throw Error.reject("no log rights"); + }; + return SB.get( state.state.log_history, i); + }; + + //look a chunk by page if over 2MB + public query(msg) func log_history_page_chunk(i : Nat, start: Nat, end: Nat) : async [Types.LogEntry]{ + let state = get_state(); + if(msg.caller != state.state.collection_data.owner and msg.caller != state.state.log_harvester) { + throw Error.reject("no log rights"); + }; + let thisChunk = SB.get(state.state.log_history, i); + let result = Buffer.Buffer(end - start + 1); + Iter.iterate(thisChunk.vals(), func(a: Types.LogEntry, index: Nat){ + if(index >= start and index <= end){ + result.add(a); + }; + }); + return result.toArray(); + }; + + //gets the current log page + public query(msg) func current_log() : async [Types.LogEntry]{ + let state = get_state(); + if(msg.caller != state.state.collection_data.owner and msg.caller != state.state.log_harvester) { + throw Error.reject("no log rights"); + }; + return SB.toArray(state.state.log); + }; + + //announces support of interfaces + public query func __supports() : async [(Text,Text)]{ + [ + ("nft_origyn","v0.1.0"), + ("data_nft_origyn","v0.1.0"), + ("collection_nft_origyn","v0.1.0"), + ("mint_nft_origyn","v0.1.0"), + ("owner_nft_origyn","v0.1.0"), + ("market_nft_origyn","v0.1.0") + ] + }; + + //lets the NFT accept cycles + public func wallet_receive() : async Nat { + let amount = Cycles.available(); + let accepted = amount; + let deposit = Cycles.accept(accepted); + accepted; + }; + + + system func preupgrade() { + + + access_tokens_stable := Iter.toArray(access_tokens.entries()); + + let nft_library_stable_buffer = Buffer.Buffer<(Text, [(Text, CandyTypes.AddressedChunkArray)])>(nft_library.size()); + for(thisKey in nft_library.entries()){ + let this_library_buffer : Buffer.Buffer<(Text, CandyTypes.AddressedChunkArray)> = Buffer.Buffer<(Text, CandyTypes.AddressedChunkArray)>(thisKey.1.size()); + for(this_item in thisKey.1.entries()){ + this_library_buffer.add((this_item.0, Workspace.workspaceToAddressedChunkArray(this_item.1)) ); + }; + nft_library_stable_buffer.add((thisKey.0, this_library_buffer.toArray())); + }; + + nft_library_stable := nft_library_stable_buffer.toArray(); + + }; + + system func postupgrade() { + nft_library_stable := []; + access_tokens_stable := []; + + + /* + + state_current.collection_data := state.collection_data; + state_current.buckets := state.buckets; + state_current.allocations := state.allocations; + state_current.canister_availible_space := state.canister_availible_space; + state_current.canister_allocated_storage := state.canister_allocated_storage; + state_current.log := state.log; + state_current.log_history := state.log_history; + state_current.log_harvester := state.log_harvester; + state_current.offers := MigrationTypes.Current.Map.new(); + */ + + /* for(this_buyer in escrow_balances.entries()){ + + for(this_tokenID in this_buyer.1.entries()){ + + + for(this_ledger in this_tokenID.1.entries()){ + 
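+                    //(illustrates walking an escrow map during upgrade: buyer -> token_id -> ledger -> individual balances)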
+ for(thisBalance in this_ledger.1.entries()){ + let #principal(seller) = thisBalance.1.seller; + let #principal(buyer) = thisBalance.1.buyer; + state_current.offers := Map.set(state_current.offers, Map.phash, seller, buyer); + }; + + }; + + }; + + }; */ + }; +}; \ No newline at end of file diff --git a/src/origyn_nft_reference/market.mo b/src/origyn_nft_reference/market.mo new file mode 100644 index 0000000..14c229a --- /dev/null +++ b/src/origyn_nft_reference/market.mo @@ -0,0 +1,3495 @@ +import AccountIdentifier "mo:principalmo/AccountIdentifier"; +import Array "mo:base/Array"; +import Blob "mo:base/Blob"; +import Buffer "mo:base/Buffer"; +import CandyTypes "mo:candy_0_1_10/types"; +import Conversions "mo:candy_0_1_10/conversion"; +import Current "migrations/v000_001_000/types"; +import D "mo:base/Debug"; +import Error "mo:base/Error"; +import Float "mo:base/Float"; +import Hash "mo:base/Hash"; +import Int "mo:base/Int"; +import Iter "mo:base/Iter"; +import Ledger_Interface "ledger_interface"; +import Map "mo:map_6_0_0/Map"; +import Metadata "metadata"; +import MigrationTypes "./migrations/types"; +import Migrations "migrations/types"; +import Mint "mint"; +import NFTUtils "utils"; +import Nat "mo:base/Nat"; +import Nat32 "mo:base/Nat32"; +import Option "mo:base/Option"; +import Principal "mo:base/Principal"; +import Properties "mo:candy_0_1_10/properties"; +import Result "mo:base/Result"; +import SHA256 "mo:crypto/SHA/SHA256"; +import Text "mo:base/Text"; +import Time "mo:base/Time"; +import Types "types"; + + +module { + + let debug_channel = { + verify_escrow = true; + verify_sale = false; + ensure = false; + invoice = false; + end_sale = false; + market = false; + royalties = false; + offers = false; + escrow = false; + withdraw_escrow = false; + withdraw_sale = false; + withdraw_reject = false; + withdraw_deposit = false; + bid = false; + }; + + let account_handler = Types.account_handler; + let token_handler = Types.token_handler; + + type StateAccess = Types.State; + + let SB = MigrationTypes.Current.SB; + + public func find_escrow_reciept( + state: StateAccess, + buyer : Types.Account, + seller: Types.Account, + token_id: Text) : Result.Result< + MigrationTypes.Current.EscrowLedgerTrie + , Types.OrigynError> { + + var found_asset : ?(Hash.Hash, Types.EscrowRecord) = null; + var found_asset_list : ?MigrationTypes.Current.EscrowLedgerTrie = null; + debug if(debug_channel.verify_escrow) D.print("found asset " # debug_show(found_asset)); + + let verified = switch(Map.get(state.state.escrow_balances, account_handler,buyer)){ + case(null){ + debug if(debug_channel.verify_escrow) D.print("didnt find asset"); + return #err(Types.errors(#no_escrow_found, "find_escrow_reciept - escrow buyer not found ", null)); + }; + case(?to_list){ + + debug if(debug_channel.verify_escrow) D.print("to_list is " # debug_show(Map.size(to_list))); + switch(Map.get(to_list, account_handler, seller)){ + case(null){ + debug if(debug_channel.verify_escrow) D.print("no escrow seller"); + return #err(Types.errors(#no_escrow_found, "find_escrow_reciept - escrow seller not found ", null));}; + case(?token_list){ + debug if(debug_channel.verify_escrow) D.print("looking for to list"); + let asset_list = switch(Map.get(token_list, Map.thash, token_id), Map.get(token_list, Map.thash, "")){ + case(null, null){ + return #err(Types.errors(#no_escrow_found, "find_escrow_reciept - escrow token_id not found ", null)); + }; + case(null, ?generalList){ + return #err(Types.errors(#no_escrow_found, "find_escrow_reciept - escrow 
token_id found for general item but token_id is specific ", null)); + }; + case(?asset_list, _ ){ + + found_asset_list := ?asset_list; + + }; + }; + + }; + }; + }; + }; + + switch(found_asset_list){ + case(?found_asset_list){ + return #ok(found_asset_list); + }; + case(null){ + return #err(Types.errors(#no_escrow_found, "find_escrow_reciept", null)); + }; + }; + + + + + }; + + //verifies that an escrow reciept exists in this NF + public func verify_escrow_reciept( + state: StateAccess, + escrow : Types.EscrowReceipt, + owner: ?Types.Account, + sale_id: ?Text) : Result.Result< + { + found_asset : {token_spec: Types.TokenSpec; escrow: Types.EscrowRecord}; + found_asset_list : MigrationTypes.Current.EscrowLedgerTrie; + }, Types.OrigynError> { + + var found_asset : ?{token_spec: Types.TokenSpec; escrow: Types.EscrowRecord} = null; + var found_asset_list : ?MigrationTypes.Current.EscrowLedgerTrie = null; + debug if(debug_channel.verify_escrow) D.print("found asset " # debug_show(found_asset)); + + let verified = switch(Map.get(state.state.escrow_balances, account_handler,escrow.buyer)){ + case(null){ + debug if(debug_channel.verify_escrow) D.print("didnt find asset"); + return #err(Types.errors(#no_escrow_found, "verify_escrow_reciept - escrow buyer not found ", null)); + }; + case(?to_list){ + //only the owner can sell it + debug if(debug_channel.verify_escrow) D.print("found to list" # debug_show(owner) # debug_show(escrow.seller)); + switch(owner){ + case(null){}; + case(?owner){ + if(Types.account_eq(owner, escrow.seller) == false){ + return #err(Types.errors(#unauthorized_access, "verify_escrow_reciept - escrow seller is not the owner ", null)); + }; + }; + }; + debug if(debug_channel.verify_escrow) D.print("to_list is " # debug_show(Map.size(to_list))); + switch(Map.get(to_list, account_handler, escrow.seller)){ + case(null){ + debug if(debug_channel.verify_escrow) D.print("no escrow seller"); + return #err(Types.errors(#no_escrow_found, "verify_escrow_reciept - escrow seller not found ", null));}; + case(?token_list){ + debug if(debug_channel.verify_escrow) D.print("looking for to list"); + let asset_list = switch(Map.get(token_list, Map.thash, escrow.token_id), Map.get(token_list, Map.thash, "")){ + case(null, null){ + return #err(Types.errors(#no_escrow_found, "verify_escrow_reciept - escrow token_id not found ", null)); + }; + case(null, ?generalList){ + return #err(Types.errors(#no_escrow_found, "verify_escrow_reciept - escrow token_id found for general item but token_id is specific ", null)); + }; + + + case(?asset_list, _ ){ + + //debug if(debug_channel.verify_escrow) D.print("testing hash" # debug_show(assetHash) # " " # debug_show(assetHash)); + found_asset_list := ?asset_list; + switch(Map.get(asset_list, token_handler, escrow.token)){ + + case(null){return #err(Types.errors(#no_escrow_found, "verify_escrow_reciept - escrow token spec not found ", null));}; + case(?balance){ + found_asset := ?{token_spec = escrow.token; escrow = balance}; + debug if(debug_channel.verify_escrow) D.print("Found an asset, checking fee"); + debug if(debug_channel.verify_escrow) D.print(debug_show(found_asset)); + debug if(debug_channel.verify_escrow) D.print(debug_show(escrow.amount)); + //check sale id + switch(sale_id, balance.sale_id){ + case(null, null){}; + case(?desired_sale_id, null){ + return #err(Types.errors(#sale_id_does_not_match, "verify_escrow_reciept - escrow sale_id does not match ", null)); + }; + case(null, ?on_file_saleID){ + //null is passed in as a sale id if we want to do sale 
id verification elsewhere + //return #err(Types.errors(#sale_id_does_not_match, "verify_escrow_reciept - escrow sale_id does not match ", null)); + }; + case(?desired_sale_id, ?on_file_saleID){ + if(desired_sale_id != on_file_saleID){ + return #err(Types.errors(#sale_id_does_not_match, "verify_escrow_reciept - escrow sale_id does not match ", null)); + }; + }; + }; + if(balance.amount >= escrow.amount){ + true; + } else {return #err(Types.errors(#withdraw_too_large, "verify_escrow_reciept - escrow not large enough", null));}; + }; + }; + }; + } + + }; + }; + }; + }; + + switch(found_asset, found_asset_list){ + case(?found_asset, ?found_asset_list){ + return #ok({ + found_asset = found_asset; + found_asset_list = found_asset_list; + }); + + }; + case(_){ + return #err(Types.errors(#nyi, "verify_escrow_reciept - should be unreachable ", null)); + }; + } + + + }; + + //verifies that a revenue reciept is in the NFT Canister + public func verify_sales_reciept( + state: StateAccess, + escrow : Types.EscrowReceipt) : Result.Result< + { + found_asset : {token_spec: Types.TokenSpec; escrow: Types.EscrowRecord}; + found_asset_list : MigrationTypes.Current.EscrowLedgerTrie; + }, Types.OrigynError> { + + var found_asset : ?{token_spec: Types.TokenSpec; escrow: Types.EscrowRecord} = null; + var found_asset_list : ?MigrationTypes.Current.EscrowLedgerTrie = null; + let verified = switch(Map.get(state.state.sales_balances, account_handler, escrow.seller)){ + case(null){ + debug if(debug_channel.verify_sale) D.print("sale seller not found"); + return #err(Types.errors(#no_escrow_found, "verify_sales_reciept - escrow seller not found ", null)); + }; + case(?to_list){ + //only the owner can sell it + + switch(Map.get(to_list, account_handler, escrow.buyer)){ + case(null){ + debug if(debug_channel.verify_sale) D.print("sale byer not found"); + return #err(Types.errors(#no_escrow_found, "verify_sales_reciept - escrow buyer not found ", null));}; + case(?token_list){ + switch(Map.get(token_list, Map.thash, escrow.token_id)){ + case(null){ + debug if(debug_channel.verify_sale) D.print("sale token id not found"); + return #err(Types.errors(#no_escrow_found, "verify_sales_reciept - escrow token_id not found ", null)); + }; + case(?asset_list){ + + found_asset_list := ?asset_list; + switch(Map.get(asset_list, token_handler,escrow.token)){ + + case(null){ + debug if(debug_channel.verify_sale) D.print("sale token not found"); + return #err(Types.errors(#no_escrow_found, "verify_sales_reciept - escrow token spec not found ", null));}; + case(?balance){ + found_asset := ?{token_spec = escrow.token; escrow = balance}; + debug if(debug_channel.verify_sale) D.print("issue with balances"); + debug if(debug_channel.verify_sale) D.print(debug_show(balance)); + debug if(debug_channel.verify_sale) D.print(debug_show(escrow)); + + if(balance.amount >= escrow.amount){ + true; + } else {return #err(Types.errors(#withdraw_too_large, "verify_sales_reciept - escrow not large enough", null));}; + }; + }; + }; + } + }; + }; + }; + }; + + switch(found_asset, found_asset_list){ + case(?found_asset, ?found_asset_list){ + return #ok({ + found_asset = found_asset; + found_asset_list = found_asset_list; + }); + }; + case(_){ + return #err(Types.errors(#nyi, "verify_sales_reciept - should be unreachable ", null)); + }; + } + }; + + //makes sure that there is not an ongoing sale for an item + public func is_token_on_sale( + state: StateAccess, + metadata: CandyTypes.CandyValue, + caller: Principal) : Result.Result{ + + debug 
if(debug_channel.ensure) D.print("in ensure"); + let token_id = switch(Metadata.get_nft_id(metadata)){ + case(#err(err_)){return #err(Types.errors(#token_not_found, "_ensure_no_existing_sale - could not find token_id ", ?caller));}; + case(#ok(val)){val}; + }; + + //look for an existing sale + debug if(debug_channel.verify_sale) D.print("geting sale"); + switch(Metadata.get_current_sale_id(metadata)){ + case(#Empty){return #ok(false)}; + case(#Text(sale_id)){ + debug if(debug_channel.verify_sale) D.print("found sale" # sale_id); + + let current_sale = switch(Map.get(state.state.nft_sales, Map.thash, sale_id)){ + case(?status){ + status; + }; + case(null){return #err(Types.errors(#sale_not_found, "_ensure_no_existing_sale - could not find sale for token " # token_id # " " # sale_id, ?caller));}; + }; + + + debug if(debug_channel.verify_sale) D.print("checking state"); + let current_sale_state = switch(NFTUtils.get_auction_state_from_status(current_sale)){ + case(#ok(val)){val}; + case(#err(err)){ + return #err(Types.errors(err.error, "_ensure_no_existing_sale - find sale state " # err.flag_point, ?caller)); + } + }; + debug if(debug_channel.verify_sale) D.print("switching config"); + switch(current_sale_state.config){ + case(#auction(config)){ + debug if(debug_channel.verify_sale) D.print("current config" # debug_show(config)); + switch(current_sale_state.status){ + case(#closed){ + //can continue + return #ok(false); + }; + case(#open){ + //can continue + return #ok(true); + }; + case(_){ + return #ok(true); + }; + }; + }; + case(_){ + return #err(Types.errors(#nyi, "_ensure_no_existing_sale - sales type check not implemented", ?caller)); + }; + }; + }; + case(_){return #err(Types.errors(#nyi, "_ensure_no_existing_sale - sales id did not match ", ?caller));}; + }; + }; + + //opens a sale if it is past the date + public func open_sale_nft_origyn(state: StateAccess, token_id: Text, caller: Principal) : Result.Result { + //D.print("in end_sale_nft_origyn"); + let metadata = switch(Metadata.get_metadata_for_token(state,token_id, caller, ?state.canister(), state.state.collection_data.owner)){ + case(#err(err)){ + return #err(Types.errors(#token_not_found, "open_sale_nft_origyn " # err.flag_point, ?caller)); + }; + case(#ok(val)){ + val; + }; + }; + + //look for an existing sale + //nyi: refactor this with details found in other functions + let current_sale = switch(Metadata.get_current_sale_id(metadata)){ + case(#Empty){return #err(Types.errors(#sale_not_found, "open_sale_nft_origyn - could not find sale for token " # token_id, ?caller));}; + case(#Text(val)){ + switch(Map.get(state.state.nft_sales, Map.thash,val)){ + case(?status){ + status; + }; + case(null){return #err(Types.errors(#sale_not_found, "open_sale_nft_origyn - could not find sale for token " # token_id, ?caller));}; + }; + }; + case(_){return #err(Types.errors(#sale_not_found, "open_sale_nft_origyn - could not find sale for token " # token_id, ?caller));}; + }; + + let current_sale_state = switch(NFTUtils.get_auction_state_from_status(current_sale)){ + case(#ok(val)){val}; + case(#err(err)){ + return #err(Types.errors(err.error, "open_sale_nft_origyn - find state " # err.flag_point, ?caller)); + }; + }; + + switch(current_sale_state.config){ + case(#auction(config)){ + let current_pricing = switch(current_sale_state.config){ + case(#auction(config)){ + config; + }; + case(_){ + return #err(Types.errors(#sale_not_found, "open_sale_nft_origyn - not an auction type ", ?caller)); + + }; + }; + + + + + + 
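+            // Sketch of the state transitions the status switch below is meant to cover
+            // (names taken from the surrounding code; "now" is state.get_time()):
+            //   #not_started -> becomes #open and returns #open_sale(true) when
+            //                   start_date <= now < end_date, otherwise #auction_not_started
+            //   #open        -> #auction_not_started ("auction already open")
+            //   #closed      -> #auction_ended ("auction already closed")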
switch(current_sale_state.status){ + case(#closed){ + return #err(Types.errors(#auction_ended, "open_sale_nft_origyn - auction already closed ", ?caller)); + }; + case(#not_started){ + if(state.get_time() >= current_pricing.start_date and state.get_time() < current_sale_state.end_date){ + current_sale_state.status := #open; + return(#ok(#open_sale(true))); + } else { + return #err(Types.errors(#auction_not_started, "open_sale_nft_origyn - auction does not need to be opened " # debug_show(current_pricing.start_date), ?caller)); + }; + }; + case(#open){ + return #err(Types.errors(#auction_not_started, "open_sale_nft_origyn - auction already open", ?caller)); + }; + }; + }; + case(_){ + return #err(Types.errors(#sale_not_found, "open_sale_nft_origyn - not an auction type ", ?caller)); + + }; + }; + }; + + //reports information about a sale + public func sale_status_nft_origyn(state: StateAccess, sale_id: Text, caller: Principal) : Result.Result { + + //look for an existing sale + let current_sale = switch(Map.get(state.state.nft_sales, Map.thash,sale_id)){ + case(?status){ + status; + }; + case(null){return #ok(#status(null))}; + }; + + + let result = #ok(#status(?{ + sale_id = current_sale.sale_id; + token_id = current_sale.token_id; + original_broker_id = current_sale.original_broker_id; + broker_id = current_sale.broker_id; + sale_type = switch(current_sale.sale_type){ + case(#auction(val)){ + #auction(Types.AuctionState_stabalize_for_xfer(val)) + }; + /* case(_){ + return #err(Types.errors(#sale_not_found, "sale_status_nft_origyn not an auction ", ?caller)); + + } */ + }; + })); + + return result; + }; + + //returns active sales on a canister + public func active_sales_nft_origyn(state: StateAccess, pages: ?(Nat, Nat), caller: Principal) : Result.Result { + + var tracker = 0 : Nat; + let (min, max, total, eof) = switch(pages){ + case(null){ + (0, Map.size(state.state.nft_metadata), Map.size(state.state.nft_metadata), true); + }; + case(?val){ + (val.0, + if(val.0 + val.1 >= Map.size(state.state.nft_metadata)){ + Map.size(state.state.nft_metadata) + } else { + val.0 + val.1; + }, + Map.size(state.state.nft_metadata), + if(val.0 + val.1 >= Map.size(state.state.nft_metadata)){ + true; + } else { + false; + }, + ); + }; + }; + + let results = Buffer.Buffer<(Text, ?Types.SaleStatusStable)>(max - min); + + label search for(this_token in Map.entries(state.state.nft_metadata)){ + if(tracker > max){break search;}; + if(tracker >= min){ + + let metadata = switch(Metadata.get_metadata_for_token(state, this_token.0, caller, null, state.state.collection_data.owner)){ + case(#err(err)){ + results.add("unminted", null); + tracker += 1; + continue search; + }; + case(#ok(val)){ + val; + }; + }; + + //look for an existing sale + + let current_sale = switch(Metadata.get_current_sale_id(metadata)){ + case(#Empty){ + results.add(this_token.0, null); + tracker += 1; + continue search; + }; + case(#Text(val)){ + switch(Map.get(state.state.nft_sales, Map.thash,val)){ + case(?status){ + status; + }; + case(null){ + results.add(this_token.0, null); + tracker += 1; + continue search; + }; + }; + }; + case(_){ + + results.add(this_token.0, null); + tracker += 1; + continue search; + + }; + }; + + let current_sale_state = switch(NFTUtils.get_auction_state_from_status(current_sale)){ + case(#ok(val)){val}; + case(#err(err)){ + + results.add(this_token.0, null); + tracker += 1; + continue search; + + }; + }; + + switch(current_sale_state.config){ + case(#auction(config)){ + let current_pricing = 
switch(current_sale_state.config){ + case(#auction(config)){ + config; + }; + case(_){ + //nyi: handle other sales types + results.add(this_token.0, null); + tracker += 1; + continue search; + }; + }; + + results.add(this_token.0, ?{ + sale_id = current_sale.sale_id; + token_id = current_sale.token_id; + broker_id = current_sale.broker_id; + original_broker_id = current_sale.original_broker_id; + sale_type = switch(current_sale.sale_type){ + case(#auction(val)){ + #auction(Types.AuctionState_stabalize_for_xfer(val)) + }; + }; + }); + + }; + case(_){ + results.add(this_token.0, null); + }; + }; + }; + tracker += 1; + }; + + return #ok(#active({ + records = results.toArray(); + eof = eof; + count = total; + })); + }; + + + //returns a history of sales + public func history_sales_nft_origyn(state: StateAccess, pages: ?(Nat, Nat), caller: Principal) : Result.Result { + + var tracker = 0 : Nat; + let (min, max, total, eof) = switch(pages){ + case(null){ + (0, Map.size(state.state.nft_sales), Map.size(state.state.nft_sales), true); + }; + case(?val){ + (val.0, + if(val.0 + val.1 >= Map.size(state.state.nft_sales)){ + Map.size(state.state.nft_sales) + } else { + val.0 + val.1; + }, + Map.size(state.state.nft_sales), + if(val.0 + val.1 >= Map.size(state.state.nft_sales)){ + true; + } else { + false; + }, + ); + }; + }; + + let results = Buffer.Buffer(max - min); + + label search for(thisSale in Map.entries(state.state.nft_sales)){ + if(tracker > max){break search;}; + if(tracker >= min){ + + + + let current_sale_state = switch(NFTUtils.get_auction_state_from_status(thisSale.1)){ + case(#ok(val)){val}; + case(#err(err)){ + + results.add(null); + tracker += 1; + continue search; + + }; + }; + + switch(current_sale_state.config){ + case(#auction(config)){ + let current_pricing = switch(current_sale_state.config){ + case(#auction(config)){ + config; + }; + case(_){ + //nyi: handle other sales types + results.add( null); + tracker += 1; + continue search; + }; + }; + + results.add(?{ + sale_id = thisSale.1.sale_id; + token_id = thisSale.1.token_id; + broker_id = thisSale.1.broker_id; + original_broker_id = thisSale.1.original_broker_id; + sale_type = switch(thisSale.1.sale_type){ + case(#auction(val)){ + #auction(Types.AuctionState_stabalize_for_xfer(val)) + }; + }; + } + ); + + }; + case(_){ + //nyi: implement other sales types + results.add(null); + }; + }; + }; + tracker += 1; + }; + + return #ok(#history({ + records = results.toArray(); + eof = eof; + count = total; + })); + }; + + //returns an invoice or details of where a user can send their depoits on a standard ledger + public func deposit_info_nft_origyn(state: StateAccess, request: ?Types.Account, caller: Principal) : Result.Result { + + debug if(debug_channel.invoice) D.print("in deposit info nft origyn."); + + let account = switch(request){ + case(null){#principal(caller)}; + case(?val){val}; + }; + + debug if(debug_channel.invoice) D.print("getting info for " # debug_show(account)); + return #ok(#deposit_info(NFTUtils.get_deposit_info(account, state.canister()))); + }; + + + //ends a sale if it is past the date or a buy it now has occured + public func end_sale_nft_origyn(state: StateAccess, token_id: Text, caller: Principal) : async Result.Result { + debug if(debug_channel.end_sale) D.print("in end_sale_nft_origyn"); + let metadata = switch(Metadata.get_metadata_for_token(state,token_id, caller, ?state.canister(), state.state.collection_data.owner)){ + case(#err(err)){ + return #err(Types.errors(#token_not_found, "end_sale_nft_origyn " # 
err.flag_point, ?caller)); + }; + case(#ok(val)){ + val; + }; + }; + + let owner = switch(Metadata.get_nft_owner(metadata)){ + case(#err(err)){ + return #err(Types.errors(err.error, "end_sale_nft_origyn " # err.flag_point, ?caller)); + }; + case(#ok(val)){ + val; + }; + }; + + + + //look for an existing sale + let current_sale = switch(Metadata.get_current_sale_id(metadata)){ + case(#Empty){return #err(Types.errors(#sale_not_found, "end_sale_nft_origyn - could not find sale for token " # token_id, ?caller));}; + case(#Text(val)){ + switch(Map.get(state.state.nft_sales, Map.thash,val)){ + case(?status){ + status; + }; + case(null){return #err(Types.errors(#sale_not_found, "end_sale_nft_origyn - could not find sale for token " # token_id, ?caller));}; + }; + }; + case(_){return #err(Types.errors(#sale_not_found, "end_sale_nft_origyn - could not find sale for token " # token_id, ?caller));}; + }; + + let current_sale_state = switch(NFTUtils.get_auction_state_from_status(current_sale)){ + case(#ok(val)){val}; + case(#err(err)){ + return #err(Types.errors(err.error, "end_sale_nft_origyn - find state " # err.flag_point, ?caller)); + }; + }; + + switch(current_sale_state.config){ + case(#auction(config)){ + let current_pricing = switch(current_sale_state.config){ + case(#auction(config)){ + config; + }; + case(_){ + return #err(Types.errors(#sale_not_found, "end_sale_nft_origyn - not an auction type ", ?caller)); + + }; + }; + + + + let buy_now = switch(current_pricing.buy_now){ + + case(null){false}; + case(?val){ + if(val <= current_sale_state.current_bid_amount){ + true; + } else { + false; + }; + }; + }; + + debug if(debug_channel.end_sale) D.print("have buy now" # debug_show(buy_now, current_pricing.buy_now, current_sale_state.current_bid_amount)); + + switch(current_sale_state.status){ + case(#closed){ + //we will close later after we try to refund a valid bid + //return #err(Types.errors(#auction_ended, "end_sale_nft_origyn - auction already closed ", ?caller)); + }; + case(#not_started){ + debug if(debug_channel.end_sale) D.print("wasnt started"); + + if(state.get_time() >= current_pricing.start_date and state.get_time() < current_sale_state.end_date){ + current_sale_state.status := #open; + }; + }; + case(_){}; + }; + + debug if(debug_channel.end_sale) D.print("handled current stauts" # debug_show(buy_now, current_pricing.buy_now, current_sale_state.current_bid_amount)); + + + //make sure auction is still over + if(state.get_time() < current_sale_state.end_date ){ + if( buy_now == true and caller == state.canister()){ + //only the canister can end a buy now + } else { + + return #err(Types.errors(#sale_not_over, "end_sale_nft_origyn - auction still running ", ?caller)); + + }; + + }; + + debug if(debug_channel.end_sale) D.print("checking reserve" # debug_show(config.reserve)); + + + + //check reserve MKT0038 + switch(config.reserve){ + case(?reserve){ + if(current_sale_state.current_bid_amount < reserve){ + //end sale but don't move NFT + current_sale_state.status := #closed; + + + switch(Metadata.add_transaction_record(state,{ + token_id = token_id; + index = 0; + txn_type = #sale_ended { + seller = owner; + buyer = owner; + token = config.token; + sale_id = ?current_sale.sale_id; + amount = 0; + extensible = #Text("reserve not met"); + }; + timestamp = state.get_time(); + }, caller)){ + case(#ok(new_trx)){ + return #ok(#end_sale(new_trx)); + }; + case(#err(err)){ + return #err(err); + }; + }; + }; + }; + case(null){}; + }; + + debug if(debug_channel.end_sale) D.print("checking escrow" 
# debug_show(current_sale_state.current_escrow)); + + switch(current_sale_state.current_escrow){ + case(null){ + //end sale but don't move NFT + current_sale_state.status := #closed; + + switch(Metadata.add_transaction_record(state,{ + token_id = token_id; + index = 0; + txn_type = #sale_ended { + seller = owner; + buyer = owner; + token = config.token; + sale_id = ?current_sale.sale_id; + amount = 0; + extensible = #Text("no bids"); + }; + timestamp = state.get_time(); + }, caller)){ + case(#ok(new_trx)){ + return #ok(#end_sale(new_trx)); + }; + case(#err(err)){ + return #err(err); + }; + }; + + + + + }; + case(?winning_escrow){ + debug if(debug_channel.end_sale) D.print("verifying escrow"); + debug if(debug_channel.end_sale) D.print(debug_show(winning_escrow)); + let verified = switch(verify_escrow_reciept(state, winning_escrow, ?owner, ?current_sale.sale_id)){ + case(#err(err)){return #err(Types.errors(err.error, "end_sale_nft_origyn verifying escrow " # err.flag_point, ?caller))}; + case(#ok(res)){ + res; + }; + }; + debug if(debug_channel.end_sale) D.print("verified is " # debug_show(verified.found_asset)); + + //move the payment to the sale revenue account + //nyi: use transfer batch to split across royalties + + let (trx_id : Types.TransactionID, account_hash : ?Blob, fee : Nat) = switch(winning_escrow.token){ + case(#ic(token)){ + switch(token.standard){ + case(#Ledger){ + debug if(debug_channel.end_sale) D.print("found ledger"); + let checker = Ledger_Interface.Ledger_Interface(); + switch(await checker.transfer_sale(state.canister(), winning_escrow, token_id, caller)){ + case(#ok(val)){ + (val.0,?val.1.account.sub_account, token.fee); + }; + case(#err(err)){ + return #err(Types.errors(err.error, "end_sale_nft_origyn " # err.flag_point, ?caller)); + }; + }; + + }; + case(_){ + return #err(Types.errors(#nyi, "end_sale_nft_origyn - ic type nyi - " # debug_show(token), ?caller)); + }; + }; + }; + case(#extensible(val)){ + return #err(Types.errors(#nyi, "end_sale_nft_origyn - extensible token nyi - " # debug_show(val), ?caller)); + }; + }; + + //change owner + var new_metadata : CandyTypes.CandyValue = switch(Properties.updateProperties(Conversions.valueToProperties(metadata), [ + { + name = Types.metadata.owner; + mode = #Set(switch(winning_escrow.buyer){ + case(#principal(buyer)){#Principal(buyer);}; + case(#account_id(buyer)){#Text(buyer);}; + case(#account(buyer)){#Array(#frozen([#Principal(buyer.owner), switch(buyer.sub_account){ + case(null){#Option(null)}; + case(?val){#Option(?#Blob(val))} + }]))}; + case(#extensible(buyer)){buyer;}; + }); + } + ])){ + case(#ok(props)){ + #Class(props); + }; + case(#err(err)){ + return #err(Types.errors(#update_class_error, "end_sale_nft_origyn - error setting owner " # token_id, ?caller)); + }; + }; + + debug if(debug_channel.end_sale) D.print("updating metadata"); + + //clear shared wallets + new_metadata := Metadata.set_system_var(new_metadata, Types.metadata.__system_wallet_shares, #Empty); + Map.set(state.state.nft_metadata, Map.thash, token_id, new_metadata); + + current_sale_state.end_date := state.get_time(); + current_sale_state.status := #closed; + current_sale_state.winner := ?winning_escrow.buyer; + + //remove escrow + debug if(debug_channel.end_sale) D.print("putting escrow balance"); + debug if(debug_channel.end_sale) D.print(debug_show(winning_escrow)); + if(verified.found_asset.escrow.amount < winning_escrow.amount){ + return #err(Types.errors(#no_escrow_found, "end_sale_nft_origyn - error finding escrow, now less than bid " # 
debug_show(winning_escrow), ?caller)); + + } else { + if(verified.found_asset.escrow.amount > winning_escrow.amount ){ + let total_amount = Nat.sub(verified.found_asset.escrow.amount, winning_escrow.amount); + Map.set(verified.found_asset_list, token_handler, verified.found_asset.token_spec, { + amount = total_amount; + seller = verified.found_asset.escrow.seller; + balances = null; + buyer = verified.found_asset.escrow.buyer; + token_id = verified.found_asset.escrow.token_id; + token = verified.found_asset.escrow.token; + sale_id = verified.found_asset.escrow.sale_id; //should be null + lock_to_date = verified.found_asset.escrow.lock_to_date; + account_hash = verified.found_asset.escrow.account_hash; + + }); + } else { + Map.delete(verified.found_asset_list, token_handler,verified.found_asset.token_spec); + }; + }; + + + //log royalties + //currently for auctions there are only secondary royalties + let royalty= switch(Properties.getClassProperty(metadata, Types.metadata.__system)){ + case(null){ + []; + }; + case(?val){ + + switch(Properties.getClassProperty(val.value, Types.metadata.__system_secondary_royalty)){ + case(null){ + []; + }; + case(?list){ + switch(list.value){ + case(#Array(the_array)){ + switch(the_array){ + case(#thawed(val)){ + val; + }; + case(#frozen(val)){ + val; + }; + }; + }; + case(_){ + []; + }; + }; + }; + }; + + }; + }; + + + + //let royaltyList = Buffer.Buffer<(Types.Account, Nat)>(royalty.size() + 1); + if(winning_escrow.amount > fee){ + //if the fee is bigger than the amount we aren't going to pay anything + //this should really be prevented elsewhere + let total = Nat.sub(winning_escrow.amount, fee); + var remaining = Nat.sub(winning_escrow.amount, fee); + + + remaining := _process_royalties(state, { + var remaining = remaining; + total = total; + fee = fee; + escrow = winning_escrow; + royalty = royalty; + broker_id = current_sale_state.current_broker_id; + original_broker_id = current_sale.original_broker_id; + sale_id = ?current_sale.sale_id; + account_hash = account_hash; + metadata = metadata; + }, caller); + + + + //D.print("putting Sales balance"); + //D.print(debug_show(winning_escrow)); + + let new_sale_balance = put_sales_balance(state, { + amount = remaining; + seller = winning_escrow.seller; + + buyer = winning_escrow.buyer; + token = winning_escrow.token; + token_id = winning_escrow.token_id; + sale_id = ?current_sale.sale_id; + lock_to_date = null; + account_hash = account_hash; + }, true); + }; + + + switch(Metadata.add_transaction_record(state,{ + token_id = token_id; + index = 0; + txn_type = #sale_ended { + seller = winning_escrow.seller; + buyer = winning_escrow.buyer; + token = winning_escrow.token; + sale_id = ?current_sale.sale_id; + amount = winning_escrow.amount; + + extensible = #Empty; + }; + timestamp = state.get_time(); + }, caller)){ + case(#ok(new_trx)){ + return #ok(#end_sale(new_trx)); + }; + case(#err(err)){ + return #err(err); + }; + }; + }; + }; + }; + case(_){ + return #err(Types.errors(#sale_not_found, "end_sale_nft_origyn - not an auction type ", ?caller)); + + }; + }; + + return #err(Types.errors(#nyi, "end_sale_nft_origyn - nyi - " , ?caller)); + + }; + + //processes a change in escrow balance + public func put_escrow_balance( + state: StateAccess, + escrow: Types.EscrowRecord, + append: Bool): Types.EscrowRecord{ + //add the escrow + + var a_from = switch(Map.get(state.state.escrow_balances, account_handler, escrow.buyer)){ + case(null){ + let new_from = Map.new>>(); + Map.set(state.state.escrow_balances, 
account_handler, escrow.buyer, new_from); + new_from; + }; + case(?val){ + val; + }; + }; + + var a_to = switch(Map.get(a_from, account_handler, escrow.seller)){ + case(null){ + let newTo = Map.new>(); + Map.set(a_from, account_handler, escrow.seller, newTo); + + //add this item to the offer index + switch(Map.get>(state.state.offers, account_handler, escrow.seller)){ + case(null){ + var aTree = Map.new(); + Map.set(aTree, account_handler, escrow.buyer, state.get_time()); + + Map.set>(state.state.offers, account_handler, escrow.seller, aTree); + }; + case(?val){ + Map.set(val, account_handler, escrow.buyer, state.get_time()); + + Map.set>(state.state.offers, account_handler, escrow.seller, val); + }; + }; + newTo; + }; + case(?val){ + val; + }; + }; + + var a_token_id = switch(Map.get(a_to, Map.thash, escrow.token_id)){ + case(null){ + let new_token_id = Map.new(); + Map.set(a_to, Map.thash, escrow.token_id, new_token_id); + new_token_id; + }; + case(?val){ + val; + }; + }; + + switch(Map.get(a_token_id, token_handler, escrow.token)){ + case(null){ + + + Map.set(a_token_id,token_handler,escrow.token, escrow); + return escrow; + }; + case(?val){ + + //note: sale_id will overwrite to save user clicks; alternative is to make them clear it and submit a new escrow + //nyi: add transaction for overwriting sale id + let newLedger = if(append == true){ + { + account_hash = escrow.account_hash; + amount = val.amount + escrow.amount; + seller = escrow.seller; + buyer = escrow.buyer; + balances = null; + token = escrow.token; + token_id = escrow.token_id; + sale_id = escrow.sale_id; //the user has staked for a new sale and we should use the new one. + lock_to_date = escrow.lock_to_date; + }; + } else { + { + account_hash = escrow.account_hash; + amount = escrow.amount; + seller = escrow.seller; + buyer = escrow.buyer; + balances = null; + token = escrow.token; + token_id = escrow.token_id; + sale_id = escrow.sale_id; + lock_to_date = escrow.lock_to_date; + }; + }; + Map.set(a_token_id, token_handler, escrow.token, newLedger); + return newLedger; + }; + }; + + }; + + //processes a changing sale balance + public func put_sales_balance(state: StateAccess, sale_balance: Types.EscrowRecord, append: Bool): Types.EscrowRecord { + //add the sale + var a_to = switch(Map.get(state.state.sales_balances, account_handler, sale_balance.seller)){ + case(null){ + let newTo = Map.new>>(); + Map.set(state.state.sales_balances, account_handler, sale_balance.seller, newTo); + newTo; + }; + case(?val){ + val; + }; + }; + + var a_from = switch(Map.get(a_to, account_handler, sale_balance.buyer)){ + case(null){ + let new_from = Map.new>(); + Map.set(a_to, account_handler, sale_balance.buyer, new_from); + new_from; + }; + case(?val){ + val; + }; + }; + + var a_token_id = switch(Map.get(a_from, Map.thash, sale_balance.token_id)){ + case(null){ + let new_token_id = Map.new(); + Map.set(a_from, Map.thash, sale_balance.token_id, new_token_id); + new_token_id; + }; + case(?val){ + val; + }; + }; + + switch(Map.get(a_token_id, token_handler, sale_balance.token)){ + case(null){ + + + Map.set(a_token_id, token_handler, sale_balance.token, sale_balance); + return sale_balance; + }; + case(?val){ + + + //note: sale_id will overwrite to save user clicks; alternative is to make them clear it and submit a new escrow + //nyi: add transaction for overwriting sale id + + let newLedger = if(append == true){ + + { + account_hash = sale_balance.account_hash; + + amount = val.amount + sale_balance.amount; + seller = sale_balance.seller; + buyer 
= sale_balance.buyer; + token = sale_balance.token; + token_id = sale_balance.token_id; + sale_id = sale_balance.sale_id; + lock_to_date = sale_balance.lock_to_date; + } //this is a more recent sales id so we use it + } else {{ + account_hash = sale_balance.account_hash; + + amount = sale_balance.amount; + seller = sale_balance.seller; + buyer = sale_balance.buyer; + token = sale_balance.token; + token_id = sale_balance.token_id; + sale_id = sale_balance.sale_id; + lock_to_date = sale_balance.lock_to_date; + + } + }; + Map.set(a_token_id, token_handler, sale_balance.token, newLedger); + return newLedger; + }; + }; + + }; + + //handles async market transfer operations like instant where interaction with other canisters is required + public func market_transfer_nft_origyn_async(state: StateAccess, request : Types.MarketTransferRequest, caller: Principal) : async Result.Result { + + debug if(debug_channel.market) D.print("in market_transfer_nft_origyn"); + var metadata = switch(Metadata.get_metadata_for_token(state, request.token_id, caller, ?state.canister(), state.state.collection_data.owner)){ + case(#err(err)){ + return #err(Types.errors(#token_not_found, "market_transfer_nft_origyn " # err.flag_point, ?caller)); + }; + case(#ok(val)){ + val; + }; + }; + debug if(debug_channel.market) D.print("have metadata" # debug_show(metadata)); + + //can't start auction if token is soulbound + if (Metadata.is_soulbound(metadata)) { + return #err(Types.errors(#token_non_transferable, "market_transfer_nft_origyn ", ?caller)); + }; + + let owner = switch( + Metadata.get_nft_owner(metadata)){ + case(#err(err)){ + return #err(Types.errors(err.error, "market_transfer_nft_origyn " # err.flag_point, ?caller)); + }; + case(#ok(val)){ + val; + }; + }; + + debug if(debug_channel.market) D.print("have owner " # debug_show(owner)); + debug if(debug_channel.market) D.print("the caller" # debug_show(caller)); + + //check to see if there is a current sale going on MKT0018 + + let this_is_minted = Metadata.is_minted(metadata); + debug if(debug_channel.market) D.print(request.token_id # " isminted" # debug_show(this_is_minted)); + if(this_is_minted){ + //this is a minted NFT - only the nft owner or nft manager can sell it + switch(Metadata.is_nft_owner(metadata, #principal(caller))){ + case(#err(err)){return #err(Types.errors(err.error, "market_transfer_nft_origyn - not an owner of the NFT - minted sale" # err.flag_point, ?caller))}; + case(#ok(val)){ + if(val == false){return #err(Types.errors(#unauthorized_access, "market_transfer_nft_origyn - not an owner of the NFT - minted sale", ?caller))}; + }; + }; + } else { + //this is a staged NFT it can be sold by the canister owner or the canister manager + if(NFTUtils.is_owner_manager_network(state,caller) == false){return #err(Types.errors(#unauthorized_access, "market_transfer_nft_origyn - not an owner of the canister - staged sale ", ?caller))}; + }; + + debug if(debug_channel.market) D.print("have minted " # debug_show(this_is_minted)); + + //look for an existing sale + switch(is_token_on_sale(state, metadata, caller)){ + case(#err(err)){return #err(Types.errors(err.error, "market_transfer_nft_origyn ensure_no_sale " # err.flag_point, ?caller))}; + case(#ok(val)){ + if(val == true){ + return #err(Types.errors(#existing_sale_found, "market_transfer_nft_origyn - sale exists " # request.token_id , ?caller)); + }; + }; + + }; + + debug if(debug_channel.market) D.print("checking pricing"); + + switch(request.sales_config.pricing){ + case(#instant){ + //the nft or staged 
nft is being instant transfered + + //if this is a marketable NFT, we need to create a waiver period + + //if this is not a marketable NFT we can insta trade + + //since this is a stage we need to call mint and it will do this for us + //set new owner + debug if(debug_channel.market) D.print("in market transfer"); + switch(request.sales_config.escrow_receipt){ + case(null){ + //we can't insta transfer because no instructions are given + //D.print("no escrow set"); + return #err(Types.errors(#improper_interface, "market_transfer_nft_origyn verifying escrow - not included ", ?caller)); + + }; + case(?escrow){ + //we should verify the escrow + if(this_is_minted){ + if(escrow.token_id == ""){ + //can't escrow to general for minted item + return #err(Types.errors(#no_escrow_found, "market_transfer_nft_origyn can't find specific escrow for minted item", ?caller)); + }; + }; + debug if(debug_channel.market) D.print("current escrow is"); + + //verify the specific escrow + + debug if(debug_channel.market) D.print(debug_show(escrow.seller)); + debug if(debug_channel.market) D.print(debug_show(escrow.buyer)); + debug if(debug_channel.market) D.print(escrow.token_id); + debug if(debug_channel.market) D.print(debug_show(Types.token_hash(escrow.token))); + debug if(debug_channel.market) D.print(debug_show(escrow.amount)); + + let verified = switch(verify_escrow_reciept(state, escrow, ?owner, null)){ + case(#err(err)){return #err(Types.errors(err.error, "market_transfer_nft_origyn verifying escrow " # err.flag_point, ?caller))}; + case(#ok(res)){ + res; + }; + }; + + //reentrancy risk so we remove the credit from the escrow + debug if(debug_channel.market) D.print("updating the asset list"); + debug if(debug_channel.market) D.print(debug_show(Map.size(verified.found_asset_list))); + debug if(debug_channel.market) D.print(debug_show(Iter.toArray(Map.entries(verified.found_asset_list)))); + if(verified.found_asset.escrow.amount > escrow.amount){ + debug if(debug_channel.market) D.print("should be overwriting escrow" # debug_show((verified.found_asset.escrow.amount,escrow.amount))); + Map.set(verified.found_asset_list, token_handler, verified.found_asset.token_spec, { + account_hash = verified.found_asset.escrow.account_hash; + amount = Nat.sub(verified.found_asset.escrow.amount,escrow.amount); + seller = verified.found_asset.escrow.seller; + balances = null; + buyer = verified.found_asset.escrow.buyer; + token_id = verified.found_asset.escrow.token_id; + token = verified.found_asset.escrow.token; + sale_id = verified.found_asset.escrow.sale_id; + lock_to_date = verified.found_asset.escrow.lock_to_date;//should be null + }); + } else { + debug if(debug_channel.market) D.print("should be deleting escrow" # debug_show((verified.found_asset.token_spec))); + Map.delete(verified.found_asset_list, token_handler, verified.found_asset.token_spec); + }; + + debug if(debug_channel.market) D.print(debug_show(Map.size(verified.found_asset_list))); + debug if(debug_channel.market) D.print(debug_show(Iter.toArray(Map.entries(verified.found_asset_list)))); + + let (trx_id : Types.TransactionID, account_hash : ?Blob, fee : Nat) = switch(escrow.token){ + case(#ic(token)){ + switch(token.standard){ + case(#Ledger){ + debug if(debug_channel.market) D.print("found ledger and sending sale " # debug_show(escrow)); + let checker = Ledger_Interface.Ledger_Interface(); + try{ + switch(await checker.transfer_sale(state.canister(), escrow, request.token_id, caller)){ + case(#ok(val)){ + (val.0, ?val.1.account.sub_account, val.2); + 
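+                            // Shape of the tuple above, inferred from the
+                            // (trx_id, account_hash, fee) binding this switch feeds:
+                            //   val.0 - ledger transaction id for the sale transfer
+                            //   val.1.account.sub_account - sub-account the funds landed in,
+                            //                               kept below as account_hash
+                            //   val.2 - bound to fee, the amount subtracted before royalties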
}; + case(#err(err)){ + //put the escrow back because the payment failed + switch(verify_escrow_reciept(state, escrow, ?owner, null)){ + case(#ok(reverify)){ + let target_escrow = { + account_hash = reverify.found_asset.escrow.account_hash; + amount = Nat.add(reverify.found_asset.escrow.amount, escrow.amount); + buyer = reverify.found_asset.escrow.buyer; + seller = reverify.found_asset.escrow.seller; + token_id = reverify.found_asset.escrow.token_id; + token = reverify.found_asset.escrow.token; + sale_id = reverify.found_asset.escrow.sale_id; + lock_to_date = reverify.found_asset.escrow.lock_to_date; + }; + + + Map.set(reverify.found_asset_list, token_handler, verified.found_asset.token_spec, target_escrow); + + + }; + case(#err(err)){ + let target_escrow = { + account_hash = verified.found_asset.escrow.account_hash; + amount = escrow.amount; + buyer = verified.found_asset.escrow.buyer; + seller = verified.found_asset.escrow.seller; + token_id = verified.found_asset.escrow.token_id; + token = verified.found_asset.escrow.token; + sale_id = verified.found_asset.escrow.sale_id; + lock_to_date = verified.found_asset.escrow.lock_to_date; + }; + Map.set(verified.found_asset_list, token_handler, verified.found_asset.token_spec, target_escrow); + } + }; + return #err(Types.errors(err.error, "market_transfer_nft_origyn instant " # err.flag_point, ?caller)); + }; + }; + } catch (e){ + //put the escrow back because payment failed + switch(verify_escrow_reciept(state, escrow, ?owner, null)){ + case(#ok(reverify)){ + let target_escrow = { + account_hash = reverify.found_asset.escrow.account_hash; + amount = Nat.add(reverify.found_asset.escrow.amount, escrow.amount); + buyer = reverify.found_asset.escrow.buyer; + seller = reverify.found_asset.escrow.seller; + token_id = reverify.found_asset.escrow.token_id; + token = reverify.found_asset.escrow.token; + sale_id = reverify.found_asset.escrow.sale_id; + lock_to_date = reverify.found_asset.escrow.lock_to_date; + }; + + + Map.set(reverify.found_asset_list, token_handler, verified.found_asset.token_spec, target_escrow); + + + }; + case(#err(err)){ + let target_escrow = { + account_hash = verified.found_asset.escrow.account_hash; + amount = escrow.amount; + buyer = verified.found_asset.escrow.buyer; + seller = verified.found_asset.escrow.seller; + token_id = verified.found_asset.escrow.token_id; + token = verified.found_asset.escrow.token; + sale_id = verified.found_asset.escrow.sale_id; + lock_to_date = verified.found_asset.escrow.lock_to_date; + }; + Map.set(verified.found_asset_list, token_handler, verified.found_asset.token_spec, target_escrow); + } + }; + + return #err(Types.errors(#unauthorized_access, "market_transfer_nft_origyn instant catch branch" # Error.message(e), ?caller)); + + + }; + + }; + case(_){ + return #err(Types.errors(#nyi, "market_transfer_nft_origyn - ic type nyi - " # debug_show(token), ?caller)); + }; + }; + }; + case(#extensible(val)){ + return #err(Types.errors(#nyi, "market_transfer_nft_origyn - extensible token nyi - " # debug_show(val), ?caller)); + }; + }; + + debug if(debug_channel.market) D.print("transfered to account hash " # debug_show(account_hash)); + + var b_freshmint = false; + + let txn_record = if(this_is_minted == false){ + //execute mint should add mint transaction + b_freshmint := true; + switch(Mint.execute_mint(state, request.token_id, escrow.buyer, ?escrow, caller )){ + case(#err(err)){ + return #err(Types.errors(err.error, "market_transfer_nft_origyn mint attempt" # err.flag_point, ?caller)); + }; + 
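+                        // Assumed success-path contract, based on how val is used below:
+                        // execute_mint hands back the freshly minted metadata in val.1, which
+                        // replaces the local metadata before a #mint transaction record is
+                        // written with the escrow's token and amount as the sale details.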
case(#ok(val)){ + debug if(debug_channel.market) D.print("updating metadata after mint"); + metadata := val.1; + switch(Metadata.add_transaction_record(state,{ + token_id = request.token_id; + index = 0; //mint should always be 0 + txn_type = #mint({ + from = owner; + to = escrow.buyer; + sale = ?{ + token = escrow.token; + amount = escrow.amount; + }; + extensible = #Empty; + }); + timestamp = Time.now(); + }, caller)){ + case(#err(err)){return #err(Types.errors(err.error, "market_transfer_nft_origyn adding transaction" # err.flag_point, ?caller));}; + case(#ok(val)){val}; + }; + + }; + }; + } else{ + + //change owner + var new_metadata : CandyTypes.CandyValue = switch(Properties.updateProperties(Conversions.valueToProperties(metadata), [ + { + name = Types.metadata.owner; + mode = #Set(switch(escrow.buyer){ + case(#principal(buyer)){#Principal(buyer);}; + case(#account_id(buyer)){#Text(buyer);}; + case(#extensible(buyer)){buyer;}; + case(#account(buyer)){#Array(#frozen([#Principal(buyer.owner), #Option(switch(buyer.sub_account){case(null){null}; case(?val){?#Blob(val);}})]))}; + }); + } + ])){ + case(#ok(props)){ + #Class(props); + }; + case(#err(err)){ + return #err(Types.errors(#update_class_error, "Market transfer Origyn - error setting owner " # escrow.token_id, ?caller)); + }; + }; + + new_metadata := Metadata.set_system_var(new_metadata, Types.metadata.__system_wallet_shares, #Empty); + + //D.print("updating metadata"); + Map.set(state.state.nft_metadata, Map.thash, escrow.token_id, new_metadata); + metadata := new_metadata; + //no need to mint + switch(Metadata.add_transaction_record(state,{ + token_id = request.token_id; + index = 0; //mint should always be 0 + txn_type = #sale_ended({ + seller = owner; + buyer = escrow.buyer; + token = escrow.token; + amount = escrow.amount; + sale_id = null; + extensible = #Empty; + }); + timestamp = Time.now(); + }, caller)){ + case(#err(err)){return #err(Types.errors(err.error, "market_transfer_nft_origyn adding transaction" # err.flag_point, ?caller));}; + case(#ok(val)){val}; + }; + }; + + + + + + //escrow already invalidated + //calculate royalties + debug if(debug_channel.market) D.print("trying to invalidate asset"); + debug if(debug_channel.market) D.print(debug_show(verified.found_asset)); + + debug if(debug_channel.market) D.print("calculating royalty"); + + let royalty = if(b_freshmint == false){ + //secondary + switch(Properties.getClassProperty(metadata, Types.metadata.__system)){ + case(null){ + D.print("no system"); + []; + }; + case(?val){ + + switch(Properties.getClassProperty(val.value, Types.metadata.__system_secondary_royalty)){ + case(null){ + debug if(debug_channel.market) D.print("no secondary"); + []; + }; + case(?list){ + switch(list.value){ + case(#Array(the_array)){ + switch(the_array){ + case(#thawed(val)){ + val; + }; + case(#frozen(val)){ + val; + }; + }; + }; + case(_){ + debug if(debug_channel.market) D.print("no Array" # debug_show(list)); + []; + }; + }; + }; + }; + + }; + }; + } else { + //primary + switch(Properties.getClassProperty(metadata, Types.metadata.__system)){ + case(null){ + debug if(debug_channel.market) D.print("no system"); + []; + }; + case(?val){ + switch(Properties.getClassProperty(val.value, Types.metadata.__system_primary_royalty)){ + case(null){ + debug if(debug_channel.market) D.print("no primary" # debug_show(val.value)); + []; + }; + case(?list){ + switch(list.value){ + case(#Array(the_array)){ + switch(the_array){ + case(#thawed(val)){ + val; + }; + case(#frozen(val)){ + val; + }; + }; 
+ }; + case(_){ + debug if(debug_channel.market) D.print("no Array" # debug_show(list)); + []; + }; + }; + }; + }; + + }; + }; + }; + + debug if(debug_channel.market) D.print("royalty is " # debug_show(royalty)); + //note: this code path is always taken since checker.transferSale requires it or errors + //we have included it here so that we can use Nat.sub without fear of underflow + + if(escrow.amount > fee){ + let total = Nat.sub(escrow.amount, fee); + var remaining = Nat.sub(escrow.amount, fee); + + + D.print("calling process royalty" # debug_show((total,remaining))); + remaining := _process_royalties(state, { + var remaining = remaining; + total = total; + fee = fee; + escrow = escrow; + royalty = royalty; + sale_id = null; + broker_id = request.sales_config.broker_id; + original_broker_id = request.sales_config.broker_id; + account_hash = account_hash; + metadata = metadata; + }, caller); + + + D.print("done with royalty" # debug_show((total,remaining))); + + + + + let new_sale_balance = put_sales_balance(state, { + amount = remaining; + seller = verified.found_asset.escrow.seller; + + buyer = verified.found_asset.escrow.buyer; + token = verified.found_asset.escrow.token; + token_id = verified.found_asset.escrow.token_id; + sale_id = null; + lock_to_date = null; + account_hash = account_hash; + }, true); + }; + + return #ok(txn_record); + + + + + + }; + }; + }; + case(_){ + return #err(Types.errors(#nyi, "market_transfer_nft_origyn nyi pricing type async", ?caller)); + }; + }; + + return #err(Types.errors(#nyi, "market_transfer_nft_origyn nyi ", ?caller)); + }; + + //handles royalty distribution + private func _process_royalties(state : StateAccess, request :{ + var remaining: Nat; + total: Nat; + fee: Nat; + account_hash: ?Blob; + royalty: [CandyTypes.CandyValue]; + escrow: Types.EscrowReceipt; + broker_id: ?Principal; + original_broker_id: ?Principal; + sale_id: ?Text; + metadata : CandyTypes.CandyValue; + }, caller: Principal) : Nat{ + + debug if(debug_channel.royalties) D.print("in process royalty" # debug_show(request)); + for(this_item in request.royalty.vals()){ + switch(this_item){ + case(#Class(the_array)){ + debug if(debug_channel.royalties) D.print("getting items from class " # debug_show(this_item)); + + let rate = switch(Properties.getClassProperty(this_item, "rate")){ + case(null){0:Float}; + case(?val){ + switch(val.value){ + case(#Float(val)){ + val; + }; + case(_){0:Float}; + }; + }; + }; + + let tag = switch(Properties.getClassProperty(this_item, "tag")){ + case(null){"other"}; + case(?val){ + switch(val.value){ + case(#Text(val)){ + val; + }; + case(_){"other"}; + }; + }; + }; + + let principal = switch(Properties.getClassProperty(this_item, "account")){ + case(null){ + if(tag == Types.metadata.royalty_network){ + switch(state.state.collection_data.network){ + case(null){[Principal.fromText("yfhhd-7eebr-axyvl-35zkt-z6mp7-hnz7a-xuiux-wo5jf-rslf7-65cqd-cae")]}; //dev fund + case(?val){[val]}; + }; + } else if(tag == Types.metadata.royalty_node){ + let val = Metadata.get_system_var(request.metadata, Types.metadata.__system_node); + switch(val){ + case(#Empty){[Principal.fromText("yfhhd-7eebr-axyvl-35zkt-z6mp7-hnz7a-xuiux-wo5jf-rslf7-65cqd-cae")]}; //dev fund + case(#Principal(val)){ + [val] + }; + case(_){ + [Principal.fromText("yfhhd-7eebr-axyvl-35zkt-z6mp7-hnz7a-xuiux-wo5jf-rslf7-65cqd-cae")] + }; + + }; + } else if(tag == Types.metadata.royalty_originator){ + let val = Metadata.get_system_var(request.metadata, Types.metadata.__system_node); + 
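+                            // The switch below resolves the originator payee from
+                            // __system_originator, falling back to the hard-coded dev-fund
+                            // principal when no originator is recorded; the __system_node
+                            // value bound to `val` just above is not consulted in this branch.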
switch(Metadata.get_system_var(request.metadata, Types.metadata.__system_originator)){ + case(#Empty){[Principal.fromText("yfhhd-7eebr-axyvl-35zkt-z6mp7-hnz7a-xuiux-wo5jf-rslf7-65cqd-cae")]}; //dev fund + case(#Principal(val)){ + [val] + }; + case(_){ + [Principal.fromText("yfhhd-7eebr-axyvl-35zkt-z6mp7-hnz7a-xuiux-wo5jf-rslf7-65cqd-cae")] + }; + }; + } else if(tag == Types.metadata.royalty_broker){ + switch(request.broker_id, request.original_broker_id){ + case(null, null){[Principal.fromText("yfhhd-7eebr-axyvl-35zkt-z6mp7-hnz7a-xuiux-wo5jf-rslf7-65cqd-cae")]}; //dev fund + case(?val, null){[val]}; + case(null, ?val2){[val2]}; + case(?val, ?val2){[val, val2]}; + }; + + } else { + [Principal.fromText("yfhhd-7eebr-axyvl-35zkt-z6mp7-hnz7a-xuiux-wo5jf-rslf7-65cqd-cae")]; //dev fund + }; + }; //dev fund + case(?val){ + switch(val.value){ + case(#Principal(val)){ + [val]; + }; + case(_){[Principal.fromText("yfhhd-7eebr-axyvl-35zkt-z6mp7-hnz7a-xuiux-wo5jf-rslf7-65cqd-cae")]}; //dev fund + }; + }; + }; + + debug if(debug_channel.royalties) D.print("have vals" # debug_show((rate, tag, principal))); + + + let total_royalty = (request.total * Int.abs(Float.toInt(rate * 1_000_000)))/1_000_000; + + + debug if(debug_channel.royalties) D.print("test royalty" # debug_show((total_royalty, principal))); + for(this_principal in principal.vals()){ + let this_royalty = (total_royalty / principal.size()); + + if(this_royalty > request.fee){ + request.remaining -= this_royalty; + //royaltyList.add(#principal(principal), this_royalty); + let id = Metadata.add_transaction_record(state, { + token_id = request.escrow.token_id; + index = 0; + txn_type = #royalty_paid { + seller = request.escrow.seller; + buyer = request.escrow.buyer; + token = request.escrow.token; + sale_id = request.sale_id; + amount = this_royalty; + tag = tag; + reciever = #principal(this_principal); + extensible = #Empty; + }; + timestamp = state.get_time(); + }, caller); + + debug if(debug_channel.royalties) D.print("added trx" # debug_show(id)); + + let new_sale_balance = put_sales_balance(state, { + amount = this_royalty; + seller = #principal(this_principal); + buyer = request.escrow.buyer; + token = request.escrow.token; + token_id = request.escrow.token_id; + sale_id = request.sale_id; + lock_to_date = null; + account_hash = request.account_hash; + }, true); + + debug if(debug_channel.royalties) D.print("new_sale_balance" # debug_show(new_sale_balance)); + + + } else { + //can't pay out if less than fee + }; + + }; + + + }; + case(_){}; + + }; + + + + }; + + return request.remaining; + }; + + //handles non-async market functions like starting an auction + public func market_transfer_nft_origyn(state: StateAccess, request : Types.MarketTransferRequest, caller: Principal) : Result.Result { + + debug if(debug_channel.market) D.print("in market_transfer_nft_origyn"); + var metadata = switch(Metadata.get_metadata_for_token(state, request.token_id, caller, ?state.canister(), state.state.collection_data.owner)){ + case(#err(err)){ + return #err(Types.errors(#token_not_found, "market_transfer_nft_origyn " # err.flag_point, ?caller)); + }; + case(#ok(val)){ + val; + }; + }; + debug if(debug_channel.market) D.print("have metadata"); + + //can't start auction if token is soulbound + if (Metadata.is_soulbound(metadata)) { + return #err(Types.errors(#token_non_transferable, "market_transfer_nft_origyn ", ?caller)); + }; + + let owner = switch( + Metadata.get_nft_owner(metadata)){ + case(#err(err)){ + return #err(Types.errors(err.error, 
"market_transfer_nft_origyn " # err.flag_point, ?caller)); + }; + case(#ok(val)){ + val; + }; + }; + + debug if(debug_channel.market) D.print("have owner " # debug_show(owner)); + debug if(debug_channel.market) D.print("the caller" # debug_show(caller)); + + //check to see if there is a current sale going on MKT0018 + + let this_is_minted = Metadata.is_minted(metadata); + debug if(debug_channel.market) D.print(request.token_id # " isminted" # debug_show(this_is_minted)); + if(this_is_minted){ + //this is a minted NFT - only the nft owner or nft manager can sell it + switch(Metadata.is_nft_owner(metadata, #principal(caller))){ + case(#err(err)){return #err(Types.errors(err.error, "market_transfer_nft_origyn - not an owner of the NFT - minted sale" # err.flag_point, ?caller))}; + case(#ok(val)){ + if(val == false){return #err(Types.errors(#unauthorized_access, "market_transfer_nft_origyn - not an owner of the NFT - minted sale", ?caller))}; + }; + }; + } else { + //this is a staged NFT it can be sold by the canister owner or the canister manager + if(NFTUtils.is_owner_manager_network(state,caller) == false){return #err(Types.errors(#unauthorized_access, "market_transfer_nft_origyn - not an owner of the canister - staged sale ", ?caller))}; + }; + + debug if(debug_channel.market) D.print("have minted " # debug_show(this_is_minted)); + + //look for an existing sale + switch(is_token_on_sale(state, metadata, caller)){ + case(#err(err)){return #err(Types.errors(err.error, "market_transfer_nft_origyn ensure_no_sale " # err.flag_point, ?caller))}; + case(#ok(val)){ + if(val == true){ + return #err(Types.errors(#existing_sale_found, "market_transfer_nft_origyn - sale exists " # request.token_id , ?caller)); + }; + }; + + }; + + debug if(debug_channel.market) D.print("checking pricing"); + + switch(request.sales_config.pricing){ + case(#auction(auction_details)){ + //what does an escrow reciept do for an auction? Place a bid? 
+ //for now ignore + switch(request.sales_config.escrow_receipt){ + case(?val){ + return #err(Types.errors(#nyi, "market_transfer_nft_origyn - handling escrow for auctions NYI", ?caller)) + }; + case(_){}; + }; + + switch(auction_details.ending){ + case(#date(val)){ + if(val <= auction_details.start_date){ + return #err(Types.errors(#improper_interface, "market_transfer_nft_origyn - end date cannot be before start date", ?caller)) + }; + }; + case(#waitForQuiet(val)){ + if(val.date <= auction_details.start_date){ + return #err(Types.errors(#improper_interface, "market_transfer_nft_origyn - end date cannot be before start date", ?caller)) + }; + }; + }; + + + if(this_is_minted == false){ + return #err(Types.errors(#nyi, "cannot auction off a unminted item", ?caller)) + + }; + + let h = SHA256.New(); + h.write(Conversions.valueToBytes(#Text("com.origyn.nft.sale-id"))); + h.write(Conversions.valueToBytes(#Text("token-id"))); + h.write(Conversions.valueToBytes(#Text(request.token_id))); + h.write(Conversions.valueToBytes(#Text("seller"))); + h.write(Conversions.valueToBytes(#Nat(Types.account_hash_uncompressed(owner)))); + h.write(Conversions.valueToBytes(#Text("timestamp"))); + h.write(Conversions.valueToBytes(#Int(state.get_time()))); + let sale_id = Conversions.valueToText(#Bytes(#frozen(h.sum([])))); + + var allow_list : ?Map.Map = null; + switch(auction_details.allow_list) { + case(null){}; + case(?val){ + var new_list = Map.new(); + + for(thisitem in val.vals()){ + Map.set(new_list, Map.phash, thisitem, true); + }; + allow_list := ?new_list; + }; + }; + + var participants = Map.new(); + Map.set(participants, Map.phash, caller, state.get_time()); + + Map.set(state.state.nft_sales, Map.thash, sale_id, { + sale_id = sale_id; + original_broker_id = request.sales_config.broker_id; + broker_id = null; //currently the broker id for a auction doesn't do much. perhaps it should split the broker reward? 
+ token_id = request.token_id; + sale_type = #auction({ + config = request.sales_config.pricing; + var current_bid_amount = 0; + var current_broker_id = request.sales_config.broker_id; + var end_date = switch(auction_details.ending){ + case(#date(theDate)){theDate}; + case(#waitForQuiet(details)){details.date}; + }; + var min_next_bid = auction_details.start_price; + var current_escrow = null; + var wait_for_quiet_count = ?0; + var status = if(state.get_time() >= auction_details.start_date){ + #open; + } else { + #not_started; + }; + var winner = null; + var allow_list = allow_list; + var participants = participants + }); + }); + + //set new owner + debug if(debug_channel.market) D.print("Setting sale id"); + metadata := Metadata.set_system_var(metadata, Types.metadata.__system_current_sale_id, #Text(sale_id)); + + Map.set(state.state.nft_metadata, Map.thash, request.token_id, metadata); + + let this_ledger = switch(Map.get(state.state.nft_ledgers, Map.thash, request.token_id)){ + case(null){ + let newBuf = SB.init(); + Map.set(state.state.nft_ledgers, Map.thash, request.token_id, newBuf); + newBuf; + }; + case(?val){val;}; + }; + + let txn = { + token_id = request.token_id; + index = SB.size(this_ledger); + timestamp = state.get_time(); + txn_type = #sale_opened({ + sale_id = sale_id; + pricing = request.sales_config.pricing; + extensible = #Empty;}); + }; + SB.add(this_ledger, txn); + + return #ok(txn); + + }; + case(_){ + return #err(Types.errors(#nyi, "market_transfer_nft_origyn nyi pricing type", ?caller)); + }; + }; + + return #err(Types.errors(#nyi, "market_transfer_nft_origyn nyi ", ?caller)); + }; + + //refreshes the offers collection + public func refresh_offers_nft_origyn(state: StateAccess, request: ?Types.Account, caller: Principal) : Result.Result{ + + let seller = switch(request){ + case(null){ + #principal(caller); + }; + case(?val){ + if(Types.account_eq(#principal(caller), val)){ + val; + } else { + if(NFTUtils.is_owner_manager_network(state, caller) == false){return #err(Types.errors(#unauthorized_access, "refresh_offerns_nft_origyn - not an owner", ?caller))}; + val; + }; + } + }; + + + + let offers = Map.get>(state.state.offers, account_handler, seller); + let offer_results = Buffer.Buffer(1); + + + debug if(debug_channel.offers) D.print("trying refresh"); + switch(offers){ + case(null){}; + case(?found_offer){ + + for(this_buyer in Map.entries(found_offer)){ + var b_keep = false; + switch(Map.get(state.state.escrow_balances, account_handler,this_buyer.0)){ + case(null){}; + case(?found_buyer){ + switch(Map.get(found_buyer, account_handler, seller)){ + case(null){}; + case(?found_seller){ + for(this_token in Map.entries(found_seller)){ + for(this_ledger in Map.entries(this_token.1)){ + //nyi: maybe check for a 0 balance + debug if(debug_channel.offers) D.print("found bkeep" # debug_show(this_ledger)); + b_keep := true; + offer_results.add(this_ledger.1); + }; + }; + }; + }; + }; + }; + if(b_keep == false){ + let clean = Map.delete(found_offer, account_handler, this_buyer.0); + Map.set>(state.state.offers, account_handler, seller, found_offer); + }; + }; + }; + }; + + if(offer_results.size() == 0){ + Map.delete>(state.state.offers, account_handler, seller); + }; + + return #ok(#refresh_offers(offer_results.toArray())); + }; + + //moves tokens from a deposit into an escrow + public func escrow_nft_origyn(state: StateAccess, request : Types.EscrowRequest, caller: Principal) : async Result.Result { + //can someone escrow for someone else? No. 
Only a buyer can create an escrow for themselves for now + //we will also allow a canister/canister owner to create escrows for itself + if(Types.account_eq(#principal(caller), request.deposit.buyer) == false and + Types.account_eq(#principal(caller), #principal(state.canister())) == false and + Types.account_eq(#principal(caller), #principal(state.state.collection_data.owner)) == false and + Array.filter(state.state.collection_data.managers, func(item: Principal){item == caller}).size() == 0){ + return #err(Types.errors(#unauthorized_access, "escrow_nft_origyn - escrow - buyer and caller do not match", ?caller)); + }; + + debug if(debug_channel.escrow) D.print("in escrow"); + debug if(debug_channel.escrow) D.print(debug_show(request)); + switch(request.lock_to_date){ + case(?val){ + if(val > state.get_time() *10){ // if an extra digit is fat fingered this will trip....gives 474 years in the future as the max + return #err(Types.errors(#improper_interface, "escrow_nft_origyn time lock should not be that far in the future", ?caller)); + + }; + }; + case(null){}; + }; + + + debug if(debug_channel.escrow) D.print(debug_show(state.canister())); + + //verify the token + if(request.token_id != ""){ + let metadata = switch(Metadata.get_metadata_for_token(state, request.token_id, caller, ?state.canister(), state.state.collection_data.owner)){ + case(#err(err)){ + return #err(Types.errors(#token_not_found, "escrow_nft_origyn " # err.flag_point, ?caller)); + }; + case(#ok(val)){ + val; + }; + }; + let this_is_minted = Metadata.is_minted(metadata); + if(this_is_minted == false){ + //cant escrow for an unminted item + return #err(Types.errors(#token_not_found, "escrow_nft_origyn ", ?caller)); + }; + + let owner = switch( + Metadata.get_nft_owner(metadata)){ + case(#err(err)){ + return #err(Types.errors(err.error, "escrow_nft_origyn " # err.flag_point, ?caller)); + }; + case(#ok(val)){ + val; + }; + }; + + if(owner != request.deposit.seller){ + //cant escrow for an owner that doesn't own the token + return #err(Types.errors(#escrow_owner_not_the_owner, "escrow_nft_origyn cannot create escrow for item someone does not own", ?caller)); + } + }; + + + + + //move the deposit to an escrow account + debug if(debug_channel.escrow) D.print("verifying the deposit"); + + let (trx_id : Types.TransactionID, account_hash : ?Blob) = switch(request.deposit.token){ + case(#ic(token)){ + switch(token.standard){ + case(#Ledger){ + debug if(debug_channel.escrow) D.print("found ledger"); + let checker = Ledger_Interface.Ledger_Interface(); + switch(await checker.transfer_deposit(state.canister(), request, caller)){ + case(#ok(val)){ + (val.transaction_id, ?val.subaccount_info.account.sub_account); + }; + case(#err(err)){ + return #err(Types.errors(err.error, "escrow_nft_origyn " # err.flag_point, ?caller)); + }; + }; + + }; + case(_){ + return #err(Types.errors(#nyi, "escrow_nft_origyn - ic type nyi - " # debug_show(request), ?caller)); + }; + }; + }; + case(#extensible(val)){ + return #err(Types.errors(#nyi, "escrow_nft_origyn - extensible token nyi - " # debug_show(request), ?caller)); + }; + }; + + //put the escrow + debug if(debug_channel.escrow) D.print("putting the escrow"); + let escrow_result = put_escrow_balance(state, { + seller = request.deposit.seller; + buyer = request.deposit.buyer; + token = request.deposit.token; + token_id = request.token_id; + amount = request.deposit.amount; + trx_id = trx_id; + sale_id = request.deposit.sale_id; + lock_to_date = request.lock_to_date; + account_hash = account_hash; + 
balances = null; + }, true); + + debug if(debug_channel.escrow) D.print(debug_show(escrow_result)); + + + //add deposit transaction + let new_trx = switch(Metadata.add_transaction_record(state,{ + token_id = request.token_id; + index = 0; + txn_type = #escrow_deposit { + seller = request.deposit.seller; + buyer = request.deposit.buyer; + token = request.deposit.token; + token_id = request.token_id; + amount = request.deposit.amount; + trx_id = trx_id; + sale_id = request.deposit.sale_id; + extensible = #Empty; + }; + timestamp = state.get_time(); + }, caller)) { + case(#err(err)){ + debug if(debug_channel.escrow) D.print("in a bad error"); + debug if(debug_channel.escrow) D.print(debug_show(err)); + //nyi: this is really bad and will mess up certificatioin later so we should really throw + return #err(Types.errors(#nyi, "escrow_nft_origyn - extensible token nyi - " # debug_show(request), ?caller)); + }; + case(#ok(new_trx)){new_trx}; + }; + + debug if(debug_channel.escrow) D.print("have the trx"); + debug if(debug_channel.escrow) D.print(debug_show(new_trx)); + return #ok({ + receipt = { + seller = request.deposit.seller; + buyer = request.deposit.buyer; + token_id = request.token_id; + token = request.deposit.token; + amount = request.deposit.amount; + sale_id = request.deposit.sale_id; + }; + balance = escrow_result.amount; + transaction = new_trx; + }); + + + }; + + //allows the user to withdraw tokens from an nft canister + public func withdraw_nft_origyn(state: StateAccess, withdraw: Types.WithdrawRequest, caller: Principal) : async Result.Result { + switch(withdraw){ + case(#deposit(details)){ + D.print("in deposit withdraw"); + debug if(debug_channel.withdraw_deposit) D.print("an escrow withdraw"); + debug if(debug_channel.withdraw_deposit) D.print(debug_show(withdraw)); + if(caller != state.canister() and Types.account_eq(#principal(caller), details.buyer) == false){ + //cant withdraw for someone else + return #err(Types.errors(#unauthorized_access, "withdraw_nft_origyn - escrow - buyer and caller do not match" , ?caller)); + }; + + debug if(debug_channel.withdraw_deposit) D.print("about to verify"); + + + let deposit_account = NFTUtils.get_deposit_info(details.buyer, state.canister()); + + + //NFT-112 + let fee = switch(details.token){ + case(#ic(token)){ + + if(details.amount <= token.fee){ + return #err(Types.errors(#withdraw_too_large, "withdraw_nft_origyn - escrow - withdraw fee is larger than amount" , ?caller)); + }; + token.fee; + }; + case(_){return #err(Types.errors(#nyi, "withdraw_nft_origyn - escrow - extensible token nyi - " # debug_show(details), ?caller)); + }; + }; + + //attempt to send payment + debug if(debug_channel.withdraw_deposit) D.print("sending payment" # debug_show((details.withdraw_to, details.amount, caller))); + var transaction_id : ?{trx_id: Types.TransactionID; fee: Nat} = null; + + transaction_id := switch(details.token){ + case(#ic(token)){ + switch(token.standard){ + case(#Ledger){ + //D.print("found ledger"); + let checker = Ledger_Interface.Ledger_Interface(); + + debug if(debug_channel.withdraw_deposit) D.print("returning amount " # debug_show(details.amount, token.fee)); + + try{ + switch(await checker.send_payment_minus_fee(details.withdraw_to, token, details.amount, ?deposit_account.account.sub_account, caller)){ + case(#ok(val)){ + //D.print("Got a val and it it is"); + ?val; + }; + case(#err(err)){ + return #err(Types.errors(#escrow_withdraw_payment_failed, "withdraw_nft_origyn - escrow - ledger payment failed err branch " # err.flag_point, 
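// --- editor's illustrative sketch (not part of this diff) ---
// The NFT-112 check above refuses a withdrawal whose amount does not exceed the
// ledger transfer fee, and what is actually sent (and later written to the
// transaction record) is the requested amount minus that fee. Hedged standalone
// sketch of that arithmetic only:
import Nat "mo:base/Nat";
import Result "mo:base/Result";

module {
  public func net_withdraw_amount(amount : Nat, fee : Nat) : Result.Result<Nat, Text> {
    if (amount <= fee) {
      return #err("withdraw fee is larger than amount");
    };
    #ok(Nat.sub(amount, fee));
  };
};
// --- end sketch ---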
?caller)); + }; + }; + } catch (e){ + + return #err(Types.errors(#escrow_withdraw_payment_failed, "withdraw_nft_origyn - escrow - ledger payment failed catch branch " # Error.message(e), ?caller)); + + }; + + }; + case(_){ + return #err(Types.errors(#nyi, "withdraw_nft_origyn - escrow - - ledger type nyi - " # debug_show(details), ?caller)); + }; + }; + }; + case(#extensible(val)){ + return #err(Types.errors(#nyi, "withdraw_nft_origyn - escrow - - token standard nyi - " # debug_show(details), ?caller)); + }; + }; + + + debug if(debug_channel.withdraw_deposit) D.print("succesful transaction :" # debug_show(transaction_id) # debug_show(details)); + + switch(transaction_id){ + case(null){ + //really should have failed already + return #err(Types.errors(#escrow_withdraw_payment_failed, "withdraw_nft_origyn - escrow - payment failed txid null" , ?caller)); + }; + case(?transaction_id){ + switch(Metadata.add_transaction_record(state,{ + token_id = ""; + index = 0; + txn_type = #deposit_withdraw({ + buyer = details.buyer; + amount = details.amount - transaction_id.fee; + fee = transaction_id.fee; + token = details.token; + trx_id = transaction_id.trx_id; + extensible = #Empty; + } + ); + timestamp = state.get_time(); + }, caller)) { + case(#ok(val)){ + return #ok(val); + }; + case(#err(err)){ + return #err(Types.errors(err.error, "withdraw_nft_origyn - escrow - ledger not updated" # debug_show(transaction_id) , ?caller)); + }; + }; + + + + }; + }; + + + + + }; + case(#escrow(details)){ + debug if(debug_channel.withdraw_escrow) D.print("an escrow withdraw"); + debug if(debug_channel.withdraw_escrow) D.print(debug_show(withdraw)); + if(caller != state.canister() and Types.account_eq(#principal(caller), details.buyer) == false){ + //cant withdraw for someone else + return #err(Types.errors(#unauthorized_access, "withdraw_nft_origyn - escrow - buyer and caller do not match" , ?caller)); + }; + debug if(debug_channel.withdraw_escrow) D.print("about to verify"); + let verified = verify_escrow_reciept(state, details, null, null); + + + switch(verified){ + case(#ok(verified)){ + + let account_info = NFTUtils.get_escrow_account_info(verified.found_asset.escrow, state.canister()); + if(verified.found_asset.escrow.amount < details.amount){ + debug if(debug_channel.withdraw_escrow) D.print("in check amount " # debug_show(verified.found_asset.escrow.amount) # " " # debug_show( details.amount)); + return #err(Types.errors(#withdraw_too_large, "withdraw_nft_origyn - escrow - withdraw too large" , ?caller)); + }; + + let a_ledger = verified.found_asset.escrow; + + switch(a_ledger.lock_to_date){ + case(?val){ + debug if(debug_channel.withdraw_escrow) D.print("found a lock date " # debug_show((val, state.get_time()))); + if(state.get_time() < val){ + return #err(Types.errors(#escrow_cannot_be_removed, "withdraw_nft_origyn - escrow - this escrow is locked until " # debug_show(val) , ?caller)); + }; + }; + case(null){ + debug if(debug_channel.withdraw_escrow) D.print("no lock date " # debug_show(( state.get_time()))); + }; + }; + + //NFT-112 + let fee = switch(details.token){ + case(#ic(token)){ + + if(a_ledger.amount <= token.fee){ + return #err(Types.errors(#withdraw_too_large, "withdraw_nft_origyn - escrow - withdraw fee is larger than amount" , ?caller)); + + }; + token.fee; + }; + case(_){return #err(Types.errors(#nyi, "withdraw_nft_origyn - escrow - extensible token nyi - " # debug_show(details), ?caller)); + }; + }; + + //D.print("got to sale id"); + + switch(a_ledger.sale_id){ + case(?sale_id){ + //check 
that the owner isn't still the bidder in the sale + switch(Map.get(state.state.nft_sales, Map.thash,sale_id)){ + case(null){ + //should be an error because sale id can't be found + return #err(Types.errors(#sale_not_found, "withdraw_nft_origyn - escrow - can't find sale top" # debug_show(a_ledger) # " " # debug_show(withdraw) , ?caller)); + }; + case(?val){ + + + + debug if(debug_channel.withdraw_escrow) D.print("testing current state"); + + let current_sale_state = switch(NFTUtils.get_auction_state_from_status(val)){ + case(#ok(val)){val}; + case(#err(err)){ + return #err(Types.errors(err.error, "withdraw_nft_origyn - escrow - find state " # err.flag_point, ?caller)); + }; + }; + + switch(current_sale_state.status){ + case(#open){ + + D.print(debug_show(current_sale_state)); + D.print(debug_show(caller)); + + //NFT-110 + switch(current_sale_state.winner){ + case(?val){ + debug if(debug_channel.withdraw_escrow) D.print("found a winner"); + if(Types.account_eq(val, details.buyer)){ + debug if(debug_channel.withdraw_escrow) D.print("should be throwing an error"); + return #err(Types.errors(#escrow_cannot_be_removed, "withdraw_nft_origyn - escrow - you are the winner" , ?caller)); + }; + }; + case(null){ + debug if(debug_channel.withdraw_escrow) D.print("not a winner"); + }; + }; + + //NFT-76 + switch(current_sale_state.current_escrow){ + case(?val){ + debug if(debug_channel.withdraw_escrow) D.print("testing current escorw"); + debug if(debug_channel.withdraw_escrow) D.print(debug_show(val.buyer)); + if(Types.account_eq(val.buyer, details.buyer)){ + D.print("passed"); + return #err(Types.errors(#escrow_cannot_be_removed, "withdraw_nft_origyn - escrow - you are the current bid" , ?caller)); + }; + }; + case(nul){ + debug if(debug_channel.withdraw_escrow) D.print("not a current escrow"); + }; + }; + }; + case(_){ + //it isn't open so we don't need to check + }; + }; + }; + }; + }; + case(null){ + + }; + }; + + debug if(debug_channel.withdraw_escrow) D.print("finding target escrow"); + debug if(debug_channel.withdraw_escrow) D.print(debug_show(a_ledger.amount)); + debug if(debug_channel.withdraw_escrow) D.print(debug_show(details.amount)); + //ok...so we should be good to withdraw + //first update the escrow + if(verified.found_asset.escrow.amount < details.amount){ + return #err(Types.errors(#escrow_cannot_be_removed, "withdraw_nft_origyn - escrow - amount too large ", ?caller)); + }; + + let target_escrow = { + account_hash = verified.found_asset.escrow.account_hash; + balances = null; + amount = Nat.sub(verified.found_asset.escrow.amount, details.amount); + buyer = details.buyer; + seller = details.seller; + token_id = details.token_id; + token = details.token; + sale_id = a_ledger.sale_id; + lock_to_date = a_ledger.lock_to_date; + }; + + if(target_escrow.amount > 0){ + Map.set(verified.found_asset_list, token_handler, details.token, target_escrow); + } else { + Map.delete(verified.found_asset_list, token_handler, details.token); + }; + + + //send payment + //reentrancy risk so we remove the escrow value above before calling + debug if(debug_channel.withdraw_escrow) D.print("sending payment" # debug_show((details.withdraw_to, details.amount, caller))); + var transaction_id : ?{trx_id: Types.TransactionID; fee: Nat} = null; + + transaction_id := switch(details.token){ + case(#ic(token)){ + switch(token.standard){ + case(#Ledger){ + //D.print("found ledger"); + let checker = Ledger_Interface.Ledger_Interface(); + + debug if(debug_channel.withdraw_escrow) D.print("returning amount " # 
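// --- editor's illustrative sketch (not part of this diff) ---
// NFT-110 / NFT-76 above: while the sale tied to an escrow is still #open, the
// auction winner and the current high bidder may not pull that escrow back out.
// Standalone sketch with a deliberately simplified Account type (the real one
// has more variants):
module {
  public type Account = { #principal : Principal; #account_id : Text };

  func account_eq(a : Account, b : Account) : Bool {
    switch (a, b) {
      case (#principal(x), #principal(y)) { x == y };
      case (#account_id(x), #account_id(y)) { x == y };
      case (_, _) { false };
    };
  };

  public func may_withdraw_from_open_sale(winner : ?Account, current_bidder : ?Account, buyer : Account) : Bool {
    let is_winner = switch (winner) { case (?w) { account_eq(w, buyer) }; case (null) { false } };
    let is_bidder = switch (current_bidder) { case (?c) { account_eq(c, buyer) }; case (null) { false } };
    not (is_winner or is_bidder);
  };
};
// --- end sketch ---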
debug_show(details.amount, token.fee)); + + try{ + switch(await checker.send_payment_minus_fee(details.withdraw_to, token, details.amount, ?account_info.account.sub_account, caller)){ + case(#ok(val)){ + //D.print("Got a val and it it is"); + ?val; + }; + case(#err(err)){ + switch(verify_escrow_reciept(state, details, null, null)){ + case(#ok(reverify)){ + let target_escrow = { + account_hash = reverify.found_asset.escrow.account_hash; + amount = Nat.add(reverify.found_asset.escrow.amount, details.amount); + buyer = reverify.found_asset.escrow.buyer; + seller = reverify.found_asset.escrow.seller; + token_id = reverify.found_asset.escrow.token_id; + token = reverify.found_asset.escrow.token; + sale_id = reverify.found_asset.escrow.sale_id; + lock_to_date = reverify.found_asset.escrow.lock_to_date; + }; + + + Map.set(reverify.found_asset_list, token_handler, details.token, target_escrow); + + + }; + case(#err(err)){ + let target_escrow = { + account_hash = a_ledger.account_hash; + amount = details.amount; + buyer = a_ledger.buyer; + seller = a_ledger.seller; + token_id = a_ledger.token_id; + token = a_ledger.token; + sale_id = a_ledger.sale_id; + lock_to_date = a_ledger.lock_to_date; + }; + + + Map.set(verified.found_asset_list, token_handler, details.token, target_escrow); + } + }; + + return #err(Types.errors(#escrow_withdraw_payment_failed, "withdraw_nft_origyn - escrow - ledger payment failed err branch " # err.flag_point, ?caller)); + }; + }; + } catch (e){ + //put the escrow back because something went wrong + switch(verify_escrow_reciept(state, details, null, null)){ + case(#ok(reverify)){ + let target_escrow = { + account_hash = reverify.found_asset.escrow.account_hash; + amount = Nat.add(reverify.found_asset.escrow.amount, details.amount); + buyer = reverify.found_asset.escrow.buyer; + seller = reverify.found_asset.escrow.seller; + token_id = reverify.found_asset.escrow.token_id; + token = reverify.found_asset.escrow.token; + sale_id = reverify.found_asset.escrow.sale_id; + lock_to_date = reverify.found_asset.escrow.lock_to_date; + }; + + + Map.set(reverify.found_asset_list, token_handler, details.token, target_escrow); + + + }; + case(#err(err)){ + let target_escrow = { + account_hash = a_ledger.account_hash; + amount = details.amount; + buyer = a_ledger.buyer; + seller = a_ledger.seller; + token_id = a_ledger.token_id; + token = a_ledger.token; + sale_id = a_ledger.sale_id; + lock_to_date = a_ledger.lock_to_date; + }; + + + Map.set(verified.found_asset_list, token_handler, details.token, target_escrow); + } + }; + return #err(Types.errors(#escrow_withdraw_payment_failed, "withdraw_nft_origyn - escrow - ledger payment failed catch branch " # Error.message(e), ?caller)); + + }; + + }; + case(_){ + return #err(Types.errors(#nyi, "withdraw_nft_origyn - escrow - - ledger type nyi - " # debug_show(details), ?caller)); + }; + }; + }; + case(#extensible(val)){ + return #err(Types.errors(#nyi, "withdraw_nft_origyn - escrow - - token standard nyi - " # debug_show(details), ?caller)); + }; + }; + + + debug if(debug_channel.withdraw_escrow) D.print("succesful transaction :" # debug_show(transaction_id) # debug_show(details)); + + switch(transaction_id){ + case(null){ + //really should have failed already + return #err(Types.errors(#escrow_withdraw_payment_failed, "withdraw_nft_origyn - escrow - payment failed txid null" , ?caller)); + }; + case(?transaction_id){ + switch(Metadata.add_transaction_record(state,{ + token_id = details.token_id; + index = 0; + txn_type = #escrow_withdraw({ + buyer 
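// --- editor's illustrative sketch (not part of this diff) ---
// Reentrancy handling above: the escrow record is debited (or deleted once it
// reaches zero) before the asynchronous ledger call, and re-credited if that
// call returns an error or traps. Minimal sketch of the balance bookkeeping
// only, assuming the caller has already checked withdrawn <= balance:
import Nat "mo:base/Nat";

module {
  // remaining balance, or null when the record should be removed entirely
  public func debit(balance : Nat, withdrawn : Nat) : ?Nat {
    let remaining = Nat.sub(balance, withdrawn);
    if (remaining == 0) { null } else { ?remaining };
  };

  // applied when the ledger transfer fails and the funds never left
  public func credit_back(balance : Nat, amount : Nat) : Nat {
    balance + amount;
  };
};
// --- end sketch ---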
= details.buyer; + amount = details.amount - transaction_id.fee; + fee = transaction_id.fee; + token = details.token; + seller = details.seller; + token_id = details.token_id; + trx_id = transaction_id.trx_id; + extensible = #Empty; + } + ); + timestamp = state.get_time(); + }, caller)) { + case(#ok(val)){ + return #ok(val); + }; + case(#err(err)){ + return #err(Types.errors(err.error, "withdraw_nft_origyn - escrow - ledger not updated" # debug_show(transaction_id) , ?caller)); + }; + }; + + + + }; + }; + + }; + case(#err(err)){ + debug if(debug_channel.withdraw_escrow) D.print("an error"); + debug if(debug_channel.withdraw_escrow) D.print(debug_show(err)); + return #err(Types.errors(err.error, "withdraw_nft_origyn - escrow - - cannot verify escrow - " # debug_show(details), ?caller));}; + }; + + + }; + case(#sale(details)){ + debug if(debug_channel.withdraw_sale) D.print("withdrawing a sale"); + debug if(debug_channel.withdraw_sale) D.print(debug_show(details)); + debug if(debug_channel.withdraw_sale) D.print(debug_show(caller)); + if(Types.account_eq(#principal(caller), details.seller) == false){ + //cant withdraw for someone else + //D.print("can't withdraw for someone else"); + return #err(Types.errors(#unauthorized_access, "withdraw_nft_origyn - sales- buyer and caller do not match" # debug_show((#principal(caller), details.seller)) , ?caller)); + }; + + let verified = verify_sales_reciept(state, details); + + switch(verified){ + case(#ok(verified)){ + debug if(debug_channel.withdraw_sale) D.print("have verified"); + + if(verified.found_asset.escrow.amount < details.amount){ + return #err(Types.errors(#withdraw_too_large, "withdraw_nft_origyn - sales - withdraw too large" , ?caller)); + }; + + let a_ledger = verified.found_asset.escrow; + + debug if(debug_channel.withdraw_sale) D.print("a_ledger" # debug_show(a_ledger)); + + let a_token_id = verified.found_asset_list; + + //let account_info = NFTUtils.get_sale_account_info({verified.found_asset.escrow, state.canister()); + + + //NFT-112 + switch(details.token){ + case(#ic(token)){ + + if(a_ledger.amount <= token.fee){ + debug if(debug_channel.withdraw_sale) D.print("withdraw fee"); + return #err(Types.errors(#withdraw_too_large, "withdraw_nft_origyn - sales - withdraw fee is larger than amount" , ?caller)); + + }; + }; + case(_){D.print("nyi err"); + return #err( + + Types.errors(#nyi, "withdraw_nft_origyn - sales - extensible token nyi - " # debug_show(details), ?caller)); + }; + }; + + + debug if(debug_channel.withdraw_sale) D.print("finding target escrow"); + debug if(debug_channel.withdraw_sale) D.print(debug_show(a_ledger.amount)); + debug if(debug_channel.withdraw_sale) D.print(debug_show(details.amount)); + //ok...so we should be good to withdraw + //first update the escrow + if(verified.found_asset.escrow.amount < details.amount){ + return #err(Types.errors(#escrow_cannot_be_removed, "withdraw_nft_origyn - sale - amount too large ", ?caller)); + + }; + + + let target_escrow = { + account_hash = a_ledger.account_hash; + amount = Nat.sub(a_ledger.amount, details.amount); + buyer = a_ledger.buyer; + seller = a_ledger.seller; + token_id = a_ledger.token_id; + token = a_ledger.token; + sale_id = a_ledger.sale_id; + lock_to_date = a_ledger.lock_to_date; + }; + + if(target_escrow.amount > 0){ + Map.set(a_token_id, token_handler, details.token, target_escrow); + } else { + Map.delete(a_token_id, token_handler, details.token); + }; + + + //send payment + debug if(debug_channel.withdraw_sale) D.print("sending payment"); + var 
transaction_id : ?{trx_id: Types.TransactionID; fee: Nat} = null; + + transaction_id := switch(details.token){ + case(#ic(token)){ + switch(token.standard){ + + case(#Ledger){ + debug if(debug_channel.withdraw_sale) D.print("found ledger sale withdraw"); + let checker = Ledger_Interface.Ledger_Interface(); + //if this fails we need to put the escrow back + try{ + switch(await checker.send_payment_minus_fee(details.withdraw_to, token, details.amount, a_ledger.account_hash, caller)){ + case(#ok(val)){ + //D.print("Got a val and it it is"); + ?val; + }; + case(#err(err)){ + //put the escrow back + debug if(debug_channel.withdraw_sale) D.print("failed, putting back ledger"); + switch(verify_sales_reciept(state, details)){ + case(#ok(reverify)){ + let target_escrow = { + account_hash = reverify.found_asset.escrow.account_hash; + amount = Nat.add(reverify.found_asset.escrow.amount, details.amount); + buyer = reverify.found_asset.escrow.buyer; + seller = reverify.found_asset.escrow.seller; + token_id = reverify.found_asset.escrow.token_id; + token = reverify.found_asset.escrow.token; + sale_id = reverify.found_asset.escrow.sale_id; + lock_to_date = reverify.found_asset.escrow.lock_to_date; + }; + + + Map.set(a_token_id, token_handler, details.token, target_escrow); + + + }; + case(#err(err)){ + + //put the escrow back + let target_escrow = { + account_hash = a_ledger.account_hash; + amount = details.amount; + buyer = a_ledger.buyer; + seller = a_ledger.seller; + token_id = a_ledger.token_id; + token = a_ledger.token; + sale_id = a_ledger.sale_id; + lock_to_date = a_ledger.lock_to_date; + }; + + + Map.set(a_token_id, token_handler, details.token, target_escrow); + } + }; + + return #err(Types.errors(#sales_withdraw_payment_failed, "withdraw_nft_origyn - sales ledger payment failed err branch" # err.flag_point, ?caller)); + }; + }; + } catch(e){ + //put the escrow back + switch(verify_sales_reciept(state, details)){ + case(#ok(reverify)){ + let target_escrow = { + account_hash = reverify.found_asset.escrow.account_hash; + amount = Nat.add(reverify.found_asset.escrow.amount, details.amount); + buyer = reverify.found_asset.escrow.buyer; + seller = reverify.found_asset.escrow.seller; + token_id = reverify.found_asset.escrow.token_id; + token = reverify.found_asset.escrow.token; + sale_id = reverify.found_asset.escrow.sale_id; + lock_to_date = reverify.found_asset.escrow.lock_to_date; + }; + + + Map.set(a_token_id, token_handler, details.token, target_escrow); + + + }; + case(#err(err)){ + let target_escrow = { + account_hash = a_ledger.account_hash; + amount = details.amount; + buyer = a_ledger.buyer; + seller = a_ledger.seller; + token_id = a_ledger.token_id; + token = a_ledger.token; + sale_id = a_ledger.sale_id; + lock_to_date = a_ledger.lock_to_date; + }; + + + Map.set(a_token_id, token_handler, details.token, target_escrow); + } + }; + + return #err(Types.errors(#sales_withdraw_payment_failed, "withdraw_nft_origyn - sales ledger payment failed catch branch" # Error.message(e), ?caller)); + }; + }; + case(_){ + return #err(Types.errors(#nyi, "withdraw_nft_origyn - sales - ledger type nyi - " # debug_show(details), ?caller)); + }; + }; + }; + case(#extensible(val)){ + return #err(Types.errors(#nyi, "withdraw_nft_origyn - sales - extensible token nyi - " # debug_show(details), ?caller)); + }; + }; + + //D.print("have a transactionid and will crate a transaction"); + switch(transaction_id){ + case(null){ + //really should have failed already + return #err(Types.errors(#sales_withdraw_payment_failed, 
"withdraw_nft_origyn - sales payment failed txid null" , ?caller)); + }; + case(?transaction_id){ + switch(Metadata.add_transaction_record(state,{ + token_id = details.token_id; + index = 0; + txn_type = #sale_withdraw({ + buyer = details.buyer; + amount = details.amount - transaction_id.fee; + fee = transaction_id.fee; + token = details.token; + seller = details.seller; + token_id = details.token_id; + trx_id = transaction_id.trx_id; + extensible = #Empty; + } + ); + timestamp = state.get_time(); + }, caller)) { + case(#ok(val)){ + //D.print("we did it"); + //D.print(debug_show(val)); + return #ok(val); + }; + case(#err(err)){ + return #err(Types.errors(err.error, "withdraw_nft_origyn - sales ledger not updated" # debug_show(transaction_id) , ?caller)); + }; + }; + + }; + }; + + }; + + case(#err(err)){ + debug if(debug_channel.withdraw_sale) D.print("an error"); + debug if(debug_channel.withdraw_sale) D.print(debug_show(err)); + return #err(Types.errors(err.error, "withdraw_nft_origyn - sale - - cannot verify escrow - " # debug_show(details), ?caller)); + }; + + }; + + }; + case(#reject(details)){ + // rejects and offer and sends the tokens back to the source + debug if(debug_channel.withdraw_reject) D.print("an escrow reject"); + if(caller != state.canister() and Types.account_eq(#principal(caller), details.seller) == false and ?caller != state.state.collection_data.network){ + //cant withdraw for someone else + debug if(debug_channel.withdraw_reject) D.print(debug_show((caller, state.canister(), details.seller, state.state.collection_data.network))); + return #err(Types.errors(#unauthorized_access, "withdraw_nft_origyn - reject - unauthorized" , ?caller)); + }; + + debug if(debug_channel.withdraw_reject) D.print("about to verify"); + let verified = verify_escrow_reciept(state, { + amount = 0; + buyer = details.buyer; + seller = details.seller; + token = details.token; + token_id = details.token_id + }, null, null); + + + switch(verified){ + case(#ok(verified)){ + + let account_info = NFTUtils.get_escrow_account_info(verified.found_asset.escrow, state.canister()); + + let a_ledger = verified.found_asset.escrow; + + // reject ignores locked assets + + //NFT-112 + let fee = switch(details.token){ + case(#ic(token)){ + + if(a_ledger.amount <= token.fee){ + return #err(Types.errors(#withdraw_too_large, "withdraw_nft_origyn - reject - withdraw fee is larger than amount" , ?caller)); + + }; + token.fee; + }; + case(_){return #err(Types.errors(#nyi, "withdraw_nft_origyn - reject - extensible token nyi - " # debug_show(details), ?caller)); + }; + }; + + debug if(debug_channel.withdraw_reject) D.print("got to sale id"); + + switch(a_ledger.sale_id){ + case(?sale_id){ + //check that the owner isn't still the bidder in the sale + switch(Map.get(state.state.nft_sales, Map.thash,sale_id)){ + case(null){ + //nyi: should be an error because sale id can't be found but do nothing for now + return #err(Types.errors(#sale_not_found, "withdraw_nft_origyn - reject - can't find sale top" # debug_show(a_ledger) # " " # debug_show(withdraw) , ?caller)); + }; + case(?val){ + + debug if(debug_channel.withdraw_reject) D.print("testing current state"); + + let current_sale_state = switch(NFTUtils.get_auction_state_from_status(val)){ + case(#ok(val)){val}; + case(#err(err)){ + return #err(Types.errors(err.error, "withdraw_nft_origyn - reject - find state " # err.flag_point, ?caller)); + }; + }; + + switch(current_sale_state.status){ + case(#open){ + + debug if(debug_channel.withdraw_reject) 
D.print(debug_show(current_sale_state)); + debug if(debug_channel.withdraw_reject) D.print(debug_show(caller)); + + //NFT-110 + switch(current_sale_state.winner){ + case(?val){ + debug if(debug_channel.withdraw_reject) D.print("found a winner"); + if(Types.account_eq(val, details.buyer)){ + debug if(debug_channel.withdraw_reject) D.print("should be throwing an error"); + return #err(Types.errors(#escrow_cannot_be_removed, "withdraw_nft_origyn - reject - you are the winner" , ?caller)); + }; + }; + case(null){ + debug if(debug_channel.withdraw_reject) D.print("not a winner"); + }; + }; + + //NFT-76 + switch(current_sale_state.current_escrow){ + case(?val){ + debug if(debug_channel.withdraw_reject) D.print("testing current escorw"); + debug if(debug_channel.withdraw_reject) D.print(debug_show(val.buyer)); + if(Types.account_eq(val.buyer, details.buyer)){ + debug if(debug_channel.withdraw_reject) D.print("passed"); + return #err(Types.errors(#escrow_cannot_be_removed, "withdraw_nft_origyn - reject - you are the current bid" , ?caller)); + }; + }; + case(nul){ + debug if(debug_channel.withdraw_reject) D.print("not a current escrow"); + }; + }; + }; + case(_){ + //it isn't open so we don't need to check + }; + }; + }; + }; + }; + case(null){ + + }; + }; + + debug if(debug_channel.withdraw_reject) D.print("finding target escrow"); + debug if(debug_channel.withdraw_reject) D.print(debug_show(a_ledger.amount)); + + //ok...so we should be good to withdraw + //first update the escrow + + //deleteing the asset + Map.delete(verified.found_asset_list, token_handler, details.token); + + //send payment + + var transaction_id : ?{trx_id: Types.TransactionID; fee: Nat} = null; + try{ + transaction_id := switch(details.token){ + case(#ic(token)){ + switch(token.standard){ + case(#Ledger){ + //D.print("found ledger"); + let checker = Ledger_Interface.Ledger_Interface(); + + debug if(debug_channel.withdraw_reject) D.print("returning amount " # debug_show(verified.found_asset.escrow.amount, token.fee)); + + switch(await checker.send_payment_minus_fee(details.buyer, token, verified.found_asset.escrow.amount, ?account_info.account.sub_account, caller)){ + case(#ok(val)){ + //D.print("Got a val and it it is"); + ?val; + }; + case(#err(err)){ + //put the escrow back + //make sure things havent changed in the mean time + //D.print("failed, putting back ledger"); + switch(verify_escrow_reciept(state, a_ledger, null, null)){ + case(#ok(reverify)){ + let target_escrow = { + account_hash = reverify.found_asset.escrow.account_hash; + amount = Nat.add(reverify.found_asset.escrow.amount, a_ledger.amount); + buyer = reverify.found_asset.escrow.buyer; + seller = reverify.found_asset.escrow.seller; + token_id = reverify.found_asset.escrow.token_id; + token = reverify.found_asset.escrow.token; + sale_id = reverify.found_asset.escrow.sale_id; + lock_to_date = reverify.found_asset.escrow.lock_to_date; + }; + + + Map.set(reverify.found_asset_list, token_handler, verified.found_asset.token_spec, target_escrow); + + + }; + case(#err(err)){ + let putback = put_escrow_balance(state, a_ledger, true); + }; + }; + + return #err(Types.errors(#escrow_withdraw_payment_failed, "withdraw_nft_origyn - reject - ledger payment failed" # err.flag_point, ?caller)); + }; + }; + + }; + case(_){ + return #err(Types.errors(#nyi, "withdraw_nft_origyn - reject - - ledger type nyi - " # debug_show(details), ?caller)); + }; + }; + }; + case(#extensible(val)){ + return #err(Types.errors(#nyi, "withdraw_nft_origyn - reject - - token standard nyi - " # 
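// --- editor's illustrative sketch (not part of this diff) ---
// The #reject branch above differs from a buyer-initiated withdraw: it can be
// driven by the seller, the canister, or the network; it ignores lock_to_date;
// and it removes the record and returns the whole escrowed amount (less the
// ledger fee, which is why balances at or below the fee are refused). Sketch of
// the refunded amount:
import Nat "mo:base/Nat";

module {
  public func reject_refund_amount(escrowed : Nat, ledger_fee : Nat) : ?Nat {
    if (escrowed <= ledger_fee) { return null }; // nothing meaningful to send back
    ?(Nat.sub(escrowed, ledger_fee));
  };
};
// --- end sketch ---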
debug_show(details), ?caller)); + }; + }; + } catch (e){ + //something failed, put the escrow back + //make sure it hasn't changed in the mean time + //D.print("failed, putting back throw"); + switch(verify_escrow_reciept(state, a_ledger, null, null)){ + case(#ok(reverify)){ + let target_escrow = { + account_hash = reverify.found_asset.escrow.account_hash; + amount = Nat.add(reverify.found_asset.escrow.amount, a_ledger.amount); + buyer = reverify.found_asset.escrow.buyer; + seller = reverify.found_asset.escrow.seller; + token_id = reverify.found_asset.escrow.token_id; + token = reverify.found_asset.escrow.token; + sale_id = reverify.found_asset.escrow.sale_id; + lock_to_date = reverify.found_asset.escrow.lock_to_date; + }; + + + Map.set(reverify.found_asset_list, token_handler, verified.found_asset.token_spec, target_escrow); + + + }; + case(#err(err)){ + let putback = put_escrow_balance(state, a_ledger, true); + }; + }; + return #err(Types.errors(#escrow_withdraw_payment_failed, "withdraw_nft_origyn - reject - payment failed" # Error.message(e) , ?caller)); + }; + + debug if(debug_channel.withdraw_reject) D.print("succesful transaction :" # debug_show(transaction_id) # debug_show(details)); + + switch(transaction_id){ + case(null){ + //really should have failed already + return #err(Types.errors(#escrow_withdraw_payment_failed, "withdraw_nft_origyn - escrow - payment failed txid null" , ?caller)); + }; + case(?transaction_id){ + switch(Metadata.add_transaction_record(state,{ + token_id = details.token_id; + index = 0; + txn_type = #escrow_withdraw({ + buyer = details.buyer; + amount = verified.found_asset.escrow.amount - transaction_id.fee; + fee = transaction_id.fee; + token = details.token; + seller = details.seller; + token_id = details.token_id; + trx_id = transaction_id.trx_id; + extensible = #Empty; + } + ); + timestamp = state.get_time(); + }, caller)) { + case(#ok(val)){ + return #ok(val); + }; + case(#err(err)){ + return #err(Types.errors(err.error, "withdraw_nft_origyn - escrow - ledger not updated" # debug_show(transaction_id) , ?caller)); + }; + }; + + + + }; + }; + + }; + case(#err(err)){ + debug if(debug_channel.withdraw_reject) D.print("an error"); + debug if(debug_channel.withdraw_reject) D.print(debug_show(err)); + return #err(Types.errors(err.error, "withdraw_nft_origyn - escrow - - cannot verify escrow - " # debug_show(details), ?caller));}; + }; + + + }; + }; + return #err(Types.errors(#nyi, "withdraw_nft_origyn - nyi - " , ?caller)); + }; + + //allows bids on auctons + public func bid_nft_origyn(state: StateAccess, request : Types.BidRequest, caller: Principal) : async Result.Result { + + + //look for an existing sale + let current_sale = + switch(Map.get(state.state.nft_sales, Map.thash,request.sale_id)){ + case(?status){status;}; + case(null){return #err(Types.errors(#sale_id_does_not_match, "bid_nft_origyn - sales id did not match " # request.sale_id, ?caller));}; + }; + + let current_sale_state = switch(NFTUtils.get_auction_state_from_status(current_sale)){ + case(#ok(val)){val}; + case(#err(err)){ + return #err(Types.errors(err.error, "bid_nft_origyn - find state " # err.flag_point, ?caller)); + }; + }; + + let current_pricing = switch(current_sale_state.config){ + case(#auction(config)){ + config; + }; + case(_){ + return #err(Types.errors(#sale_not_found, "bid_nft_origyn - not an auction type ", ?caller)); + + }; + }; + + switch(current_sale_state.status){ + case(#open){ + if(state.get_time() >= current_sale_state.end_date){ + //current_sale_state.status := 
#closed; + return #err(Types.errors(#auction_ended, "bid_nft_origyn - sale is past close date " # request.sale_id, ?caller)); + }; + }; + case(#not_started){ + if(state.get_time() >= current_pricing.start_date and state.get_time() < current_sale_state.end_date){ + current_sale_state.status := #open; + }; + }; + case(_){return #err(Types.errors(#auction_ended, "bid_nft_origyn - sale is not open " # request.sale_id, ?caller));}; + }; + + + switch(current_sale_state.allow_list){ + case(null){ + debug if(debug_channel.bid) D.print("allow list is null"); + }; + case(?val){ + debug if(debug_channel.bid) D.print("allow list inst null"); + switch(Map.get(val, Map.phash, caller)){ + case(null){return #err(Types.errors(#unauthorized_access, "bid_nft_origyn - not on allow list ", ?caller))}; + case(?val){} + }; + }; + }; + + + var metadata = switch(Metadata.get_metadata_for_token(state,request.escrow_receipt.token_id, caller, ?state.canister(), state.state.collection_data.owner)){ + case(#err(err)){ + return #err(Types.errors(#token_not_found, "bid_nft_origyn " # err.flag_point, ?caller)); + }; + case(#ok(val)){ + val; + }; + }; + + let owner = switch(Metadata.get_nft_owner(metadata)){ + case(#err(err)){ + return #err(Types.errors(err.error, "bid_nft_origyn " # err.flag_point, ?caller)); + }; + case(#ok(val)){ + val; + }; + }; + + + + //make sure token ids match + if(current_sale.token_id != request.escrow_receipt.token_id){ + return #err(Types.errors(#token_id_mismatch, "bid_nft_origyn - token id of sale does not match escrow receipt " # request.escrow_receipt.token_id, ?caller)); + }; + + //make sure assets match + debug if(debug_channel.bid) D.print("checking asset sale type " # debug_show((_get_token_from_sales_status(current_sale), request.escrow_receipt.token))); + if(Types.token_eq(_get_token_from_sales_status(current_sale), request.escrow_receipt.token) == false){ + return #err(Types.errors(#asset_mismatch, "bid_nft_origyn - asset in sale and escrow receipt do not match " # debug_show(request.escrow_receipt.token) # debug_show(_get_token_from_sales_status(current_sale)), ?caller)); + }; + + //make sure owners match + if(Types.account_eq(owner, request.escrow_receipt.seller) == false){ + return #err(Types.errors(#receipt_data_mismatch, "bid_nft_origyn - owner and seller do not match " # debug_show(request.escrow_receipt.token) # debug_show(_get_token_from_sales_status(current_sale)), ?caller)); + }; + + //make sure buyers match + if(Types.account_eq(#principal(caller), request.escrow_receipt.buyer) == false){ + return #err(Types.errors(#receipt_data_mismatch, "bid_nft_origyn - caller and buyer do not match " # debug_show(request.escrow_receipt.token) # debug_show(_get_token_from_sales_status(current_sale)), ?caller)); + }; + + //make sure the receipt is valid + debug if(debug_channel.bid) D.print("verifying Escrow"); + let verified = switch(verify_escrow_reciept(state, request.escrow_receipt, null, ?request.sale_id)){ + case(#err(err)){return #err(Types.errors(err.error, "bid_nft_origyn verifying escrow " # err.flag_point, ?caller))}; + case(#ok(res)){ + res; + }; + }; + + if(verified.found_asset.escrow.amount < request.escrow_receipt.amount){ + //D.print("in check amount"); + return #err(Types.errors(#withdraw_too_large, "bid_nft_origyn - escrow - amount more than in escrow verified: " # Nat.toText(verified.found_asset.escrow.amount) # " request: " # Nat.toText(request.escrow_receipt.amount) , ?caller)); + + }; + + + + //make sure auction is still running + let current_time = 
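// --- editor's illustrative sketch (not part of this diff) ---
// The allow-list check above treats a null list as "anyone may bid"; otherwise
// the caller must appear in the map. The map's value type is elided in this
// rendering, so Bool is assumed here purely for illustration:
import Map "mo:map_6_0_0/Map";

module {
  public func may_bid(allow_list : ?Map.Map<Principal, Bool>, caller : Principal) : Bool {
    switch (allow_list) {
      case (null) { true };
      case (?list) {
        switch (Map.get(list, Map.phash, caller)) {
          case (null) { false };
          case (?_) { true };
        };
      };
    };
  };
};
// --- end sketch ---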
state.get_time(); + if(state.get_time() > current_sale_state.end_date){ + // MKT0028 + return #err(Types.errors(#auction_ended, "bid_nft_origyn - auction ended current_date" # debug_show(current_time) # " " # " end_time:" # debug_show(current_sale_state.end_date), ?caller)); + + }; + + switch(current_sale_state.status){ + case(#closed){ + //we will close later after we try to refund a valid bid + debug if(debug_channel.bid) D.print("refunding closed " # debug_show(verified.found_asset.escrow.amount)); + let service : Types.Service = actor((Principal.toText(state.canister()))); + let refund_id = service.sale_nft_origyn(#withdraw( + #escrow({ + amount = verified.found_asset.escrow.amount; //return back the whole escrow + buyer = request.escrow_receipt.buyer; + seller = request.escrow_receipt.seller; + token = request.escrow_receipt.token; + token_id = request.escrow_receipt.token_id; + withdraw_to = request.escrow_receipt.buyer;} + ))); + //last_withdraw_result := ?refund_id; + + //debug if(debug_channel.bid) D.print(debug_show(refund_id)); + return #err(Types.errors(#auction_ended, "end_sale_nft_origyn - auction already closed - attempting escrow return ", ?caller)); + }; + + case(_){}; + }; + + //make sure amount is high enough + if(request.escrow_receipt.amount < current_sale_state.min_next_bid){ + //if the bid is too low we should refund their escrow + debug if(debug_channel.bid) D.print("refunding not high enough bid " # debug_show(verified.found_asset.escrow.amount)); + let service : Types.Service = actor((Principal.toText(state.canister()))); + let refund_id = service.sale_nft_origyn(#withdraw( + #escrow({ + amount = verified.found_asset.escrow.amount; //return back the whole escrow minus fee + buyer = verified.found_asset.escrow.buyer; + seller = verified.found_asset.escrow.seller; + token = verified.found_asset.escrow.token; + token_id = verified.found_asset.escrow.token_id; + withdraw_to = verified.found_asset.escrow.buyer;} + ))); + //last_withdraw_result := ?refund_id; + + //debug if(debug_channel.bid) D.print(debug_show(refund_id)); + + return #err(Types.errors(#bid_too_low, "bid_nft_origyn - bid too low - refund issued " , ?caller)); + }; + + let buy_now = switch(current_pricing.buy_now){ + + case(null){false}; + case(?val){ + if(val <= request.escrow_receipt.amount){ + true; + } else { + false; + }; + }; + }; + + debug if(debug_channel.bid) D.print("have buy now" # debug_show(buy_now, current_pricing.buy_now, current_sale_state.current_bid_amount)); + + let new_trx = Metadata.add_transaction_record(state,{ + token_id = request.escrow_receipt.token_id; + index = 0; + txn_type = #auction_bid({ + buyer = request.escrow_receipt.buyer; + amount = request.escrow_receipt.amount; + token = request.escrow_receipt.token; + broker_id = request.broker_id; + sale_id = request.sale_id; + extensible = #Empty; + } + ); + timestamp = state.get_time(); + }, caller); + + + debug if(debug_channel.bid) D.print("about to try refund"); + switch(new_trx){ + case(#ok(val)){ + //nyi: implement wait for quiet + + debug if(debug_channel.bid) D.print("in this" # debug_show(current_sale_state.current_escrow)); + + + //update the sale + + let newMinBid = switch(current_pricing.min_increase){ + case(#percentage(apercentage)){ + + //request.escrow_receipt.amount * (1 + apercentage) + return #err(Types.errors(#nyi, "bid_nft_origyn - percentage increase not implemented " , ?caller)); + + }; + case(#amount(aamount)){ + request.escrow_receipt.amount + aamount; + }; + }; + + + debug if(debug_channel.bid) 
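// --- editor's illustrative sketch (not part of this diff) ---
// The minimum next bid above is the accepted bid plus a fixed increment; the
// #percentage branch is still nyi and only hinted at by the commented-out
// formula amount * (1 + percentage). That formula is reproduced below purely as
// a hypothetical, with the Float result truncated back to a Nat:
import Float "mo:base/Float";
import Int "mo:base/Int";

module {
  public type MinIncrease = { #amount : Nat; #percentage : Float };

  public func min_next_bid(accepted_bid : Nat, increase : MinIncrease) : Nat {
    switch (increase) {
      case (#amount(a)) { accepted_bid + a };
      case (#percentage(p)) {
        Int.abs(Float.toInt(Float.fromInt(accepted_bid) * (1.0 + p)));
      };
    };
  };
};
// --- end sketch ---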
D.print("have a min bid" # debug_show(newMinBid)); + + switch(current_sale_state.current_escrow){ + case(null){ + + //update state + debug if(debug_channel.bid) D.print("updating the state" # debug_show(request)); + current_sale_state.current_bid_amount := request.escrow_receipt.amount; + current_sale_state.min_next_bid := newMinBid; + current_sale_state.current_escrow := ?request.escrow_receipt; + current_sale_state.current_broker_id := request.broker_id; + }; + case(?val){ + + //update state + debug if(debug_channel.bid) D.print("Before" # debug_show(val.amount) # debug_show(val)); + current_sale_state.current_bid_amount := request.escrow_receipt.amount; + current_sale_state.min_next_bid := newMinBid; + current_sale_state.current_escrow := ?request.escrow_receipt; + current_sale_state.current_broker_id := request.broker_id; + debug if(debug_channel.bid) D.print("After" # debug_show(val.amount) # debug_show(val)); + //refund the escrow + //nyi: this would be better triggered by an event + //if this fails they can still manually withdraw the escrow. + debug if(debug_channel.bid) D.print("Trying refund escrow " # debug_show(val.amount) # debug_show(val)); + let service : Types.Service = actor((Principal.toText(state.canister()))); + let refund_id = service.sale_nft_origyn(#withdraw( + #escrow({ + amount = val.amount; + buyer = val.buyer; + seller = val.seller; + token = val.token; + token_id = val.token_id; + withdraw_to = val.buyer;} + ) + )); + + //last_withdraw_result := ?refund_id; + debug if(debug_channel.bid) D.print("done"); + //debug if(debug_channel.bid) D.print(debug_show(refund_id)); + + }; + }; + + if(buy_now){ + + debug if(debug_channel.bid) D.print("handling buy now"); + + let service : Types.Service = actor((Principal.toText(state.canister()))); + + let result = await service.sale_nft_origyn(#end_sale(request.escrow_receipt.token_id)); + + switch(result){ + case(#ok(val)){ + switch(val){ + case(#end_sale(val)){ + return #ok(val); + }; + case(_){ + return #err(Types.errors(#improper_interface, "bid_nft_origyn - buy it now call to end sale had odd response " # debug_show(result), ?caller )); + }; + }; + }; + case(#err(err)){ + return #err(err); + }; + }; + + //call ourseves to close the auction + }; + return #ok(val); + }; + case(#err(err)){ + return #err(Types.errors(err.error, "bid_nft_origyn - create transaction record " # err.flag_point, ?caller)); + }; + }; + + + }; + + + //pulls the token out of a sale + private func _get_token_from_sales_status(status: Types.SaleStatus) : Types.TokenSpec{ + switch(status.sale_type){ + case(#auction(auction_status)){ + return switch(auction_status.config){ + case(#auction(auction_config)){ + return auction_config.token; + }; + case(_){ + debug if(debug_channel.bid) D.print("getTokenfromSalesstatus not configured for type"); + assert(false); + return #extensible(#Empty); + }; + }; + }; + /* case(_){ + debug if(debug_channel.bid) D.print("getTokenfromSalesstatus not configured for type"); + assert(false); + return #extensible(#Empty); + }; */ + }; + + }; + + +} \ No newline at end of file diff --git a/src/origyn_nft_reference/migrations/lib.mo b/src/origyn_nft_reference/migrations/lib.mo new file mode 100644 index 0000000..6986e89 --- /dev/null +++ b/src/origyn_nft_reference/migrations/lib.mo @@ -0,0 +1,49 @@ +import MigrationTypes "./types"; +import v0_1_0 "./v000_001_000"; +import D "mo:base/Debug"; + +module { + let upgrades = [ + v0_1_0.upgrade, + // do not forget to add your new migration upgrade method here + ]; + + let downgrades = [ 
+ v0_1_0.downgrade, + // do not forget to add your new migration downgrade method here + ]; + + func getMigrationId(state: MigrationTypes.State): Nat { + return switch (state) { + case (#v0_0_0(_)) 0; + case (#v0_1_0(_)) 1; + // do not forget to add your new migration id here + // should be increased by 1 as it will be later used as an index to get upgrade/downgrade methods + }; + }; + + public func migrate( + prevState: MigrationTypes.State, + nextState: MigrationTypes.State, + args: MigrationTypes.Args + ): MigrationTypes.State { + + D.print("in migrate" # debug_show(prevState)); + var state = prevState; + var migrationId = getMigrationId(prevState); + D.print("getting migration id"); + let nextMigrationId = getMigrationId(nextState); + D.print(debug_show(nextMigrationId)); + + while (migrationId != nextMigrationId) { + D.print("in nft while"); + let migrate = if (nextMigrationId > migrationId) upgrades[migrationId] else downgrades[migrationId - 1]; + D.print("upgrade should have run"); + migrationId := if (nextMigrationId > migrationId) migrationId + 1 else migrationId - 1; + + state := migrate(state, args); + }; + + return state; + }; +}; \ No newline at end of file diff --git a/src/origyn_nft_reference/migrations/types.mo b/src/origyn_nft_reference/migrations/types.mo new file mode 100644 index 0000000..4fbd205 --- /dev/null +++ b/src/origyn_nft_reference/migrations/types.mo @@ -0,0 +1,20 @@ +import v0_1_0 "./v000_001_000/types"; + +module { + // do not forget to change current migration when you add a new one + // you should use this field to import types from you current migration anywhere in your project + // instead of importing it from migration folder itself + public let Current = v0_1_0; + + public type Args = { + owner: Principal; + storage_space: Nat; + // you can add any fields here to pass external data to your migrations + }; + + public type State = { + #v0_0_0: {#id; #data: ()}; + #v0_1_0: { #id; #data: v0_1_0.State }; + // do not forget to add your new migration state types here + }; +}; \ No newline at end of file diff --git a/src/origyn_nft_reference/migrations/v000_000_000/lib.mo b/src/origyn_nft_reference/migrations/v000_000_000/lib.mo new file mode 100644 index 0000000..3e70412 --- /dev/null +++ b/src/origyn_nft_reference/migrations/v000_000_000/lib.mo @@ -0,0 +1,14 @@ +import MigrationTypes "../types"; + +module { + public func upgrade(prevmigration_state: MigrationTypes.State, args: MigrationTypes.Args): MigrationTypes.State { + + return #v0_0_0(#data); + }; + + public func downgrade(prev_migration_state: MigrationTypes.State, args: MigrationTypes.Args): MigrationTypes.State { + + return #v0_0_0(#data); + }; + +}; \ No newline at end of file diff --git a/src/origyn_nft_reference/migrations/v000_000_000/types.mo b/src/origyn_nft_reference/migrations/v000_000_000/types.mo new file mode 100644 index 0000000..b4d8c04 --- /dev/null +++ b/src/origyn_nft_reference/migrations/v000_000_000/types.mo @@ -0,0 +1,10 @@ + +// please do not import any types from your project outside migrations folder here +// it can lead to bugs when you change those types later, because migration types should not be changed +// you should also avoid importing these types anywhere in your project directly from here +// use MigrationTypes.Current property instead + + +module { + public type State = (); +}; \ No newline at end of file diff --git a/src/origyn_nft_reference/migrations/v000_001_000/lib.mo b/src/origyn_nft_reference/migrations/v000_001_000/lib.mo new file mode 100644 index 
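// --- editor's illustrative sketch (not part of this diff) ---
// How the migration module above is typically consumed: the actor keeps the
// versioned #v0_x_y wrapper in a stable variable and asks migrate() to walk it
// to the wanted version, passing #id for the target since only its tag is read.
// Import paths and argument values here are assumptions for illustration only.
import MigrationTypes "migrations/types";
import Migrations "migrations";

module {
  public func bring_up_to_date(prev : MigrationTypes.State, owner : Principal) : MigrationTypes.State {
    Migrations.migrate(prev, #v0_1_0(#id), { owner = owner; storage_space = 0 });
  };
};
// --- end sketch ---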
0000000..bce7c84 --- /dev/null +++ b/src/origyn_nft_reference/migrations/v000_001_000/lib.mo @@ -0,0 +1,79 @@ +import MigrationTypes "../types"; +import v0_1_0 "types"; +import v0_0_0 "../v000_000_000/types"; +import SB_lib "mo:stablebuffer_0_2_0/StableBuffer"; +import Map_lib "mo:map_6_0_0/Map"; +import CandyTypes_lib "mo:candy_0_1_10/types"; +import D "mo:base/Debug"; + +module { + public func upgrade(prev_migration_state: MigrationTypes.State, args: MigrationTypes.Args): MigrationTypes.State { + + + + + D.print("in upgrade"); + return #v0_1_0(#data({ + //holds info about the collection + + var collection_data : v0_1_0.CollectionData = { + var logo = null; + var name = null; + var symbol = null; + var owner = args.owner; + var managers = []; //managers have some special access to a collection. used for 3rd party managment dapps + var network = null; //networks have ultimate control over a collection + var metadata = null; //information about the collection + var active_bucket = null; //tracks the current bucket that storage is being assigned to + var allocated_storage = args.storage_space; //total allocated storage for this collection + var available_space = args.storage_space; //space remaning in the collection + }; + + //tracks strage buckets where library files can be stored + var buckets : Map_lib.Map = Map_lib.new(); + + //tracks token-id, library-id allocations and information about where the asset resides + var allocations : Map_lib.Map<(Text, Text), v0_1_0.AllocationRecord> = Map_lib.new<(Text, Text), v0_1_0.AllocationRecord>(); + + //tracks space on the gateway canister + var canister_availible_space = args.storage_space; + var canister_allocated_storage = args.storage_space; + + //basic logging functionality for the NFT + var log = SB_lib.initPresized(1000); + var log_history = SB_lib.initPresized<[v0_1_0.LogEntry]>(1); //holds log history + var log_harvester: Principal = args.owner; //can pull and delete logs + + //tracks metadata for a token-id + var nft_metadata = Map_lib.new(); + + //tracks escrows for sales + var escrow_balances : v0_1_0.EscrowBuyerTrie = Map_lib.new>>>(); + + //tracks sales revenue for sales + var sales_balances : v0_1_0.SalesSellerTrie = Map_lib.new>>>(); + + //tracks offers made from one user to another + var offers : Map_lib.Map> = Map_lib.new>(); + + //tracks the history of each token-id and the collection at token-id "" + var nft_ledgers : Map_lib.Map> = Map_lib.new>(); + + //tracks the active sales in the canister + //nyi: currently only store the latest sale so other data is destoyed, probably need to store somewhere, basic data is available in the ledger + var nft_sales : Map_lib.Map = Map_lib.new(); + })); + }; + + //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + + public func downgrade(migration_state: MigrationTypes.State, args: MigrationTypes.Args): MigrationTypes.State { + return #v0_0_0(#data); + }; +}; \ No newline at end of file diff --git a/src/origyn_nft_reference/migrations/v000_001_000/types.mo b/src/origyn_nft_reference/migrations/v000_001_000/types.mo new file mode 100644 index 0000000..6981e3a --- /dev/null +++ b/src/origyn_nft_reference/migrations/v000_001_000/types.mo @@ -0,0 +1,330 @@ +import SB_lib "mo:stablebuffer_0_2_0/StableBuffer"; +import Map_lib "mo:map_6_0_0/Map"; +import CandyTypes_lib "mo:candy_0_1_10/types"; +// please do not import any types from your project outside migrations folder here +// it can lead to bugs when 
you change those types later, because migration types should not be changed +// you should also avoid importing these types anywhere in your project directly from here +// use MigrationTypes.Current property instead + + +module { + + //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + + + public let SB = SB_lib; + public let Map = Map_lib; + public let CandyTypes = CandyTypes_lib; + + public type CollectionData = { + var logo: ?Text; + var name: ?Text; + var symbol: ?Text; + var metadata: ?CandyTypes.CandyValue; + var owner : Principal; + var managers: [Principal]; + var network: ?Principal; + var allocated_storage: Nat; + var available_space : Nat; + var active_bucket: ?Principal; + }; + + public type BucketData = { + principal : Principal; + var allocated_space: Nat; + var available_space: Nat; + date_added: Int; + b_gateway: Bool; + var version: (Nat, Nat, Nat); + var allocations: Map.Map<(Text,Text), Int>; // (token_id, library_id), Timestamp + }; + + public type AllocationRecord = { + canister : Principal; + allocated_space: Nat; + var available_space: Nat; + var chunks: SB.StableBuffer; + token_id: Text; + library_id: Text; + }; + + public type LogEntry = { + event : Text; + timestamp: Int; + data: CandyTypes.CandyValue; + caller: ?Principal; + }; + + public type SalesSellerTrie = Map.Map>>>; + + + public type SalesBuyerTrie = Map.Map>>; + + public type SalesTokenIDTrie = Map.Map>; + + public type SalesLedgerTrie = Map.Map; + + public type EscrowBuyerTrie = Map.Map>>>; + + public type EscrowSellerTrie = Map.Map>>; + + public type EscrowTokenIDTrie = Map.Map>; + + public type EscrowLedgerTrie = Map.Map; + + public type Account = { + #principal : Principal; + #account : {owner: Principal; sub_account: ?Blob}; + #account_id : Text; + #extensible : CandyTypes.CandyValue; + }; + + public type EscrowRecord = { + amount: Nat; + buyer: Account; + seller:Account; + token_id: Text; + token: TokenSpec; + sale_id: ?Text; //locks the escrow to a specific sale + lock_to_date: ?Int; //locks the escrow to a timestamp + account_hash: ?Blob; //sub account the host holds the funds in + }; + + public type TokenSpec = { + #ic: ICTokenSpec; + #extensible : CandyTypes.CandyValue; //#Class + }; + + public type ICTokenSpec = { + canister: Principal; + fee: Nat; + symbol: Text; + decimals: Nat; + standard: { + #DIP20; + #Ledger; + #EXTFungible; + #ICRC1; + }; + }; + + public type PricingConfig = { + #instant; //executes an escrow recipt transfer -only available for non-marketable NFTs + #flat: { + token: TokenSpec; + amount: Nat; //Nat to support cycles + }; + //below have not been signficantly desinged or vetted + #dutch: { + start_price: Nat; + decay_per_hour: Float; + reserve: ?Nat; + }; + #auction: AuctionConfig; + #extensible:{ + #candyClass + } + }; + + public type AuctionConfig = { + reserve: ?Nat; + token: TokenSpec; + buy_now: ?Nat; + start_price: Nat; + start_date: Int; + ending: { + #date: Int; + #waitForQuiet: { + date: Int; + extention: Nat64; + fade: Float; + max: Nat + }; + }; + min_increase: { + #percentage: Float; + #amount: Nat; + }; + allow_list : ?[Principal]; + }; + + public type TransactionRecord = { + token_id: Text; + index: Nat; + txn_type: { + #auction_bid : { + buyer: Account; + amount: Nat; + token: TokenSpec; + sale_id: Text; + extensible: CandyTypes.CandyValue; + }; + #mint : { + from: Account; + to: Account; + //nyi: metadata hash + sale: ?{token: TokenSpec; + amount: Nat; //Nat to 
support cycles + }; + extensible: CandyTypes.CandyValue; + }; + #sale_ended : { + seller: Account; + buyer: Account; + + token: TokenSpec; + sale_id: ?Text; + amount: Nat;//Nat to support cycles + extensible: CandyTypes.CandyValue; + }; + #royalty_paid : { + seller: Account; + buyer: Account; + reciever: Account; + tag: Text; + token: TokenSpec; + sale_id: ?Text; + amount: Nat;//Nat to support cycles + extensible: CandyTypes.CandyValue; + }; + #sale_opened : { + pricing: PricingConfig; + sale_id: Text; + extensible: CandyTypes.CandyValue; + }; + #owner_transfer : { + from: Account; + to: Account; + extensible: CandyTypes.CandyValue; + }; + #escrow_deposit : { + seller: Account; + buyer: Account; + token: TokenSpec; + token_id: Text; + amount: Nat;//Nat to support cycles + trx_id: TransactionID; + extensible: CandyTypes.CandyValue; + }; + #escrow_withdraw : { + seller: Account; + buyer: Account; + token: TokenSpec; + token_id: Text; + amount: Nat;//Nat to support cycles + fee: Nat; + trx_id: TransactionID; + extensible: CandyTypes.CandyValue; + }; + #deposit_withdraw : { + buyer: Account; + token: TokenSpec; + amount: Nat;//Nat to support cycles + fee: Nat; + trx_id: TransactionID; + extensible: CandyTypes.CandyValue; + }; + #sale_withdraw : { + seller: Account; + buyer: Account; + token: TokenSpec; + token_id: Text; + amount: Nat; //Nat to support cycles + fee: Nat; + trx_id: TransactionID; + extensible: CandyTypes.CandyValue; + }; + #canister_owner_updated : { + owner: Principal; + extensible: CandyTypes.CandyValue; + }; + #canister_managers_updated : { + managers: [Principal]; + extensible: CandyTypes.CandyValue; + }; + #canister_network_updated : { + network: Principal; + extensible: CandyTypes.CandyValue; + }; + #data; //nyi + #burn; + #extensible : CandyTypes.CandyValue; + + }; + timestamp: Int; + }; + + //used to identify the transaction in a remote ledger; usually a nat on the IC + public type TransactionID = { + #nat : Nat; + #text : Text; + #extensible : CandyTypes.CandyValue + }; + + public type SaleStatus = { + sale_id: Text; //sha256?; + original_broker_id: ?Principal; + broker_id: ?Principal; + token_id: Text; + sale_type: { + #auction: AuctionState; + }; + }; + + public type EscrowReceipt = { + amount: Nat; //Nat to support cycles + seller: Account; + buyer: Account; + token_id: Text; + token: TokenSpec; + + }; + + public type AuctionState = { + config: PricingConfig; + var current_bid_amount: Nat; + var current_broker_id: ?Principal; + var end_date: Int; + var min_next_bid: Nat; + var current_escrow: ?EscrowReceipt; + var wait_for_quiet_count: ?Nat; + var allow_list: ?Map.Map; //empty set means everyone + var participants: Map.Map; + var status: { + #open; + #closed; + #not_started; + }; + var winner: ?Account; + }; + + public type State = { + // this is the data you previously had as stable variables inside your actor class + var collection_data : CollectionData; + var buckets : Map.Map; + var allocations : Map.Map<(Text, Text), AllocationRecord>; + var canister_availible_space : Nat; + var canister_allocated_storage : Nat; + var log : SB.StableBuffer; + var log_history : SB.StableBuffer<[LogEntry]>; + var log_harvester : Principal; + var offers : Map.Map>; + var nft_metadata : Map.Map; + var escrow_balances : EscrowBuyerTrie; + var sales_balances : SalesSellerTrie; + var nft_ledgers : Map.Map>; + var nft_sales : Map.Map; + }; +}; \ No newline at end of file diff --git a/src/origyn_nft_reference/migrations_storage/lib.mo b/src/origyn_nft_reference/migrations_storage/lib.mo 
new file mode 100644 index 0000000..b469a46 --- /dev/null +++ b/src/origyn_nft_reference/migrations_storage/lib.mo @@ -0,0 +1,47 @@ +import v_0_1_0 "./v000_001_000"; +import MigrationTypes "./types"; +import D "mo:base/Debug"; + +module { + let upgrades = [ + + // do not forget to add your new migration upgrade method here + v_0_1_0.upgrade + ]; + + let downgrades = [ + v_0_1_0.downgrade, + // do not forget to add your new migration downgrade method here + ]; + + func getMigrationId(state: MigrationTypes.State): Nat { + return switch (state) { + case (#v0_0_0(_)) 0; + case (#v0_1_0(_)) 1; + // do not forget to add your new migration id here + // should be increased by 1 as it will be later used as an index to get upgrade/downgrade methods + }; + }; + + public func migrate( + prevState: MigrationTypes.State, + nextState: MigrationTypes.State, + args: MigrationTypes.Args + ): MigrationTypes.State { + var state = prevState; + var migrationId = getMigrationId(prevState); + + let nextMigrationId = getMigrationId(nextState); + + while (migrationId != nextMigrationId) { + D.print("in storage while"); + let migrate = if (nextMigrationId > migrationId) upgrades[migrationId] else downgrades[migrationId - 1]; + D.print("upgrade should have run"); + migrationId := if (nextMigrationId > migrationId) migrationId + 1 else migrationId - 1; + + state := migrate(state, args); + }; + + return state; + }; +}; \ No newline at end of file diff --git a/src/origyn_nft_reference/migrations_storage/types.mo b/src/origyn_nft_reference/migrations_storage/types.mo new file mode 100644 index 0000000..15ae979 --- /dev/null +++ b/src/origyn_nft_reference/migrations_storage/types.mo @@ -0,0 +1,21 @@ +import v0_1_0 "./v000_001_000/types"; + +module { + // do not forget to change current migration when you add a new one + // you should use this field to import types from you current migration anywhere in your project + // instead of importing it from migration folder itself + public let Current = v0_1_0; + + public type Args = { + gateway_canister: Principal; + network: ?Principal; + storage_space: Nat; + caller: Principal; + }; + + public type State = { + #v0_0_0: {#id; #data:()}; + #v0_1_0: { #id; #data: v0_1_0.State }; + // do not forget to add your new migration state types here + }; +}; \ No newline at end of file diff --git a/src/origyn_nft_reference/migrations_storage/v000_001_000/lib.mo b/src/origyn_nft_reference/migrations_storage/v000_001_000/lib.mo new file mode 100644 index 0000000..0b9665b --- /dev/null +++ b/src/origyn_nft_reference/migrations_storage/v000_001_000/lib.mo @@ -0,0 +1,35 @@ +import MigrationTypes "../types"; +import SB_lib "mo:stablebuffer_0_2_0/StableBuffer"; +import Map_lib "mo:map_6_0_0/Map"; +import CandyTypes_lib "mo:candy_0_1_10/types"; +import v0_1_0_types = "types"; + +module { + public func upgrade(prev_migration_state: MigrationTypes.State, args: MigrationTypes.Args): MigrationTypes.State { + + + return #v0_1_0(#data({ + var nft_metadata : Map_lib.Map = Map_lib.new(); + var collection_data : v0_1_0_types.CollectionDataForStorage = { + var owner = args.gateway_canister; + var managers = [args.caller]; + var network = args.network; + }; + var canister_availible_space = args.storage_space; + var canister_allocated_storage = args.storage_space; + + var allocations : Map_lib.Map<(Text, Text), v0_1_0_types.AllocationRecord> = Map_lib.new<(Text, Text), v0_1_0_types.AllocationRecord>(); + + //basic logging functionality for the NFT + var log = SB_lib.initPresized(1000); + var log_history = 
SB_lib.initPresized<[v0_1_0_types.LogEntry]>(1); //holds log history + var log_harvester: Principal = args.caller; //can pull and delete logs + })); + }; + + public func downgrade(migration_state: MigrationTypes.State, args: MigrationTypes.Args): MigrationTypes.State { + return #v0_0_0(#data); + }; + + +}; \ No newline at end of file diff --git a/src/origyn_nft_reference/migrations_storage/v000_001_000/types.mo b/src/origyn_nft_reference/migrations_storage/v000_001_000/types.mo new file mode 100644 index 0000000..af6e3af --- /dev/null +++ b/src/origyn_nft_reference/migrations_storage/v000_001_000/types.mo @@ -0,0 +1,61 @@ +import SB_lib "mo:stablebuffer_0_2_0/StableBuffer"; +import Map_lib "mo:map_6_0_0/Map"; +import CandyTypes_lib "mo:candy_0_1_10/types"; +// please do not import any types from your project outside migrations folder here +// it can lead to bugs when you change those types later, because migration types should not be changed +// you should also avoid importing these types anywhere in your project directly from here +// use MigrationTypes.Current property instead + + +module { + + //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + + + public let SB = SB_lib; + public let Map = Map_lib; + public let CandyTypes = CandyTypes_lib; + + public type CollectionData = { + var owner : Principal; + var managers: [Principal]; + var network: ?Principal; + }; + + public type CollectionDataForStorage = { + + var owner : Principal; + var managers: [Principal]; + var network: ?Principal; + + }; + + public type AllocationRecord = { + canister : Principal; + allocated_space: Nat; + var available_space: Nat; + var chunks: SB.StableBuffer; + token_id: Text; + library_id: Text; + }; + + public type LogEntry = { + event : Text; + timestamp: Int; + data: CandyTypes.CandyValue; + caller: ?Principal; + }; + + public type State = { + // this is the data you previously had as stable variables inside your actor class + var nft_metadata : Map.Map; + + var collection_data : CollectionData; + var allocations : Map.Map<(Text, Text), AllocationRecord>; + var canister_availible_space : Nat; + var canister_allocated_storage : Nat; + var log : SB.StableBuffer; + var log_history : SB.StableBuffer<[LogEntry]>; + var log_harvester : Principal; + }; +}; \ No newline at end of file diff --git a/src/origyn_nft_reference/mint.mo b/src/origyn_nft_reference/mint.mo new file mode 100644 index 0000000..944caa9 --- /dev/null +++ b/src/origyn_nft_reference/mint.mo @@ -0,0 +1,956 @@ +import Types "types"; +import Result "mo:base/Result"; +import CandyTypes "mo:candy_0_1_10/types"; +import Iter "mo:base/Iter"; +import Blob "mo:base/Blob"; +import Option "mo:base/Option"; +import Principal "mo:base/Principal"; +import Text "mo:base/Text"; +import Properties "mo:candy_0_1_10/properties"; +import Workspace "mo:candy_0_1_10/workspace"; +import Metadata "metadata"; +import TrieMap "mo:base/TrieMap"; +import Conversions "mo:candy_0_1_10/conversion"; +import D "mo:base/Debug"; +import Nat "mo:base/Nat"; +import Buffer "mo:base/Buffer"; +import Time "mo:base/Time"; +import NFTUtils "utils"; +import Map "mo:map_6_0_0/Map"; +import SB "mo:stablebuffer_0_2_0/StableBuffer"; + +module { + + //lets user turn debug messages on and off for local replica + let debug_channel = { + function_announce = false; + storage = false; + library = false; + stage = false; + mint = false; + remote = false; + }; + + //adds a library to the nft + private 
func handle_library(state : Types.State, token_id : Text, found_metadata : CandyTypes.CandyValue, caller : Principal) : Result.Result{ + //prep the library + debug if(debug_channel.library) D.print("in handle library"); + switch(Metadata.get_nft_library(found_metadata, ?caller)){ + case(#err(err)){}; //fine for now...library isn't required + case(#ok(library)){ + switch(library){ + case(#Array(item)){ + switch(item){ + case(#thawed(classes)){ + debug if(debug_channel.library) D.print("handling library in nft stage"); + for(this_item in classes.vals()){ + debug if(debug_channel.library) D.print("handling an item " # debug_show(this_item)); + //handle each library + let library_id = switch(Metadata.get_nft_text_property(this_item, Types.metadata.library_id)){ + case(#err(err)){return #err(Types.errors(#malformed_metadata, "handle_library - library needs library_id", ?caller))}; + case(#ok(val)){val}; + }; + + let library_size = switch(Metadata.get_nft_nat_property(this_item, Types.metadata.library_size)){ + case(#err(err)){return #err(Types.errors(#malformed_metadata, "handle_library - library needs size", ?caller))}; + case(#ok(val)){val}; + }; + + let library_type = switch(Metadata.get_nft_text_property(this_item, Types.metadata.library_location_type)){ + case(#err(err)){return #err(Types.errors(#malformed_metadata, "handle_library - library needs type", ?caller))}; + case(#ok(val)){val}; + }; + + debug if(debug_channel.library) D.print("handling " # debug_show(library_id, library_size, library_type)); + + if(library_type == "canister"){ + //find our current bucket + debug if(debug_channel.library) D.print("in a canister branch"); + + //todo: review what happens if storage = 0 + let active_bucket = switch(state.state.collection_data.active_bucket){ + case(null){ + debug if(debug_channel.library) D.print("thie active bucket was null and we are checking that the current canister has space so we can set it"); + if(state.state.canister_availible_space > 0){ + state.state.collection_data.active_bucket := ?state.canister(); + state.canister(); + } else { + return #err(Types.errors(#not_enough_storage, "handle_library - need to initialize storage for collections where gateway has no storage", ?caller)); + } + }; + case(?val){val}; + }; + + //D.print("active bucket is " # debug_show((active_bucket, state.canister(), state.state.buckets))); + + var canister_bucket = switch(Map.get(state.state.buckets, Map.phash, active_bucket)){ + case(null){ + //only happens once on first library addition + debug if(debug_channel.library) D.print("setting up the bucket for the first time through" # debug_show(state.state.canister_availible_space)); + let a_bucket = { + principal = state.canister(); + var allocated_space = state.state.canister_availible_space; + var available_space = state.state.canister_availible_space; //should still be the maximum amount + date_added = Time.now(); + b_gateway = true; + var version = (0,0,1); + var allocations = Map.new<(Text,Text), Int>(); + }; + //D.print("original bucket set uup " # debug_show(a_bucket)); + Map.set(state.state.buckets, Map.phash, state.canister(), a_bucket); + a_bucket; + }; + case(?a_bucket){ + //D.print("was already in the bucket"); + if(a_bucket.available_space >= library_size){ + //D.print("bucket still has space"); + a_bucket + } else { + //D.print("need a bucket"); + //need a new active bucket + var b_found = false; + var newItem = a_bucket; + label find for(this_item in Map.entries(state.state.buckets)){ + //D.print("testing bucket " # 
debug_show(this_item)); + if(this_item.1.available_space >= library_size){ + //D.print("updating the active bucket " # debug_show((this_item.0, token_id, library_id))); + b_found :=true; + newItem := this_item.1; + state.state.collection_data.active_bucket := ?this_item.0; + break find; + }; + }; + + if(b_found == true){ + debug if(debug_channel.library) D.print("found a bucket" # debug_show(newItem)); + newItem; + } else { + debug if(debug_channel.library) D.print("erroring because " # debug_show((a_bucket.available_space, library_size))); + //make sure that size isn't bigger than biggest possible size + return #err(Types.errors(#not_enough_storage, "stage_nft_origyn - need to initialize storage out side of this function, dynamic creation is nyi", ?caller)); + }; + }; + }; + }; + + debug if(debug_channel.library) D.print("have bucket is " # debug_show((canister_bucket, state.canister(),token_id, library_id))); + + + //make sure that there is space or create a new bucket + + + let allocation = switch(Map.get<(Text,Text), Types.AllocationRecord>(state.state.allocations, (NFTUtils.library_hash, NFTUtils.library_equal), (token_id, library_id))){ + case(null){ + //there is no allocation for this library yet, lets create it + debug if(debug_channel.library) D.print("no allocation for this library....creating"); + let a_allocation = { + canister = canister_bucket.principal; + allocated_space = library_size; + var available_space = library_size; + var chunks = SB.initPresized(1); + token_id = token_id; + library_id = library_id; + timestamp = state.get_time(); + }; + debug if(debug_channel.library) D.print("ceating this allocation fresh " # debug_show((a_allocation, token_id, library_id))); + Map.set<(Text,Text), Types.AllocationRecord>(state.state.allocations, (NFTUtils.library_hash, NFTUtils.library_equal), (token_id, library_id), a_allocation); + //D.print("testing allocation " # debug_show(canister_bucket.available_space, library_size)); + Map.set<(Text,Text), Int>(canister_bucket.allocations, (NFTUtils.library_hash, NFTUtils.library_equal), (token_id, library_id), state.get_time()); + canister_bucket.available_space -= library_size; + state.state.collection_data.available_space -= library_size; + if(state.canister() == canister_bucket.principal){ + state.state.canister_availible_space -= library_size; + }; + a_allocation; + }; + case(?val){ + + //this allocation already exists....did it change? If so, what do we do? 
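+ // Re-staging path: this library already has an allocation. An equal declared size is a
+ // no-op; a larger size grows the allocation and charges the difference against the bucket
+ // and the collection's available space; shrinking an allocation is not yet supported.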
+ //NYI: erase the file and reset the allocation + debug if(debug_channel.library) D.print("this allocation is already here" # debug_show(val)); + + if(val.allocated_space == library_size){ + //do nothing + val; + } else if(val.allocated_space < library_size){ + + let a_allocation = { + canister = val.canister; + allocated_space = library_size; + //nyi: more to think through here + var available_space = val.available_space + (Nat.sub(library_size,val.allocated_space)); + var chunks = val.chunks; + token_id = token_id; + library_id = library_id; + timestamp = state.get_time(); + }; + Map.set<(Text,Text), Types.AllocationRecord>(state.state.allocations, (NFTUtils.library_hash, NFTUtils.library_equal), (token_id, library_id), a_allocation); + //canister_bucket.allocations := Map.set<(Text,Text), Int>(canister_bucket.allocations,( NFTUtils.library_hash, NFTUtils.library_equal), (token_id, library_id), state.get_time()); + debug if(debug_channel.library) D.print("testing allocation " # debug_show(canister_bucket.available_space, library_size)); + canister_bucket.available_space -= (library_size - val.allocated_space); + state.state.collection_data.available_space -= (library_size - val.allocated_space); + a_allocation; + } else{ + //nyi: here we would give some back, but we don't support shrining right now. + val; + } + } + }; + + debug if(debug_channel.library) D.print("ok allocation"); + }; + //nyi: if it is collection, should we check that it exists? + }; + }; + case(_){return #err(Types.errors(#malformed_metadata, "stage_nft_origyn - library should be thawed", ?caller));}; + }; + }; + case(_){return #err(Types.errors(#malformed_metadata, "stage_nft_origyn - library should be an array", ?caller));}; + }; + + }; + }; + + return #ok("ok"); + + }; + + //mints an NFT + public func mint_nft_origyn(state : Types.State, token_id : Text, new_owner : Types.Account, caller : Principal) : async Result.Result { + if(NFTUtils.is_owner_manager_network(state, caller) == false){return #err(Types.errors(#unauthorized_access, "mint_nft_origyn - not an owner", ?caller))}; + + let result = execute_mint(state, token_id, new_owner, null, caller); + + //notify library canisters of metadata + //warning: nyi: this needs to be moved to an async work flow as too many library canistes will overflow the cycle limit + + + debug if(debug_channel.storage) D.print("mint done...handling library" # debug_show((result))); + switch(result){ + case(#ok(data)){ + debug if(debug_channel.storage) D.print("have data " # debug_show(data)); + let library = Metadata.get_nft_library_array(data.1, ?caller); + switch(library){ + case(#err(err)){}; + case(#ok(library)){ + debug if(debug_channel.storage) D.print(debug_show(Iter.toArray(library.vals()))); + for(this_library in library.vals()){ + //we look at each library and if it is on another server we need + //to let that server know about the new metadata for the NFT + let found = Map.new(); + debug if(debug_channel.storage) D.print("processing a library" # debug_show((this_library, state.state.allocations))); + switch(Properties.getClassProperty(this_library, Types.metadata.library_id )){ + case(null){ + //shouldnt be here + debug if(debug_channel.storage) D.print("shouldnt be here"); + }; + case(?library_id){ + debug if(debug_channel.storage) D.print(Conversions.valueToText(library_id.value)); + switch(Map.get(state.state.allocations, (NFTUtils.library_hash, NFTUtils.library_equal), (token_id, Conversions.valueToText(library_id.value)))){ + case(null){ + //shouldn't be here but won't 
fail + debug if(debug_channel.storage) D.print("shouldnt be here null get"); + }; + case(?val){ + if(val.canister != state.canister()){ + debug if(debug_channel.storage) D.print("updating metadata for storage " # debug_show(val.canister) # debug_show(data.1)); + if(Map.get(found, Map.phash, val.canister) == null){ + let storage_actor : Types.StorageService = actor(Principal.toText(val.canister)); + let storage_future = storage_actor.refresh_metadata_nft_origyn(token_id, data.1); + Map.set(found, Map.phash, val.canister, true); + }; + } else { + debug if(debug_channel.storage) D.print("didnt update storage" # debug_show((val.canister, state.canister()))); + }; + }; + }; + }; + }; + + }; + }; + }; + return #ok(data.0) + }; + case(#err(err)){ + return #err(err); + } + }; + }; + + //stages the metadata of an nft + public func stage_nft_origyn( + state : Types.State, + metadata : CandyTypes.CandyValue, + caller: Principal): Result.Result{ + debug if(debug_channel.stage) D.print("in stage"); + //only an owner can stage + if(NFTUtils.is_owner_manager_network(state,caller) == false){return #err(Types.errors(#unauthorized_access, "stage_nft_origyn - not an owner", ?caller))}; + + //ensure id is in the class + debug if(debug_channel.stage) D.print("looking for id"); + let id_val = Conversions.valueToText( + switch(Properties.getClassProperty(metadata, "id")){ + case(null){ + return #err(Types.errors(#id_not_found_in_metadata, "stage_nft_origyn - find id", ?caller)); + }; + case(?found){ + found.value; + }; + }); + + + debug if(debug_channel.stage) D.print("id is " # id_val); + + + debug if(debug_channel.stage) D.print("looking for system"); + //if this exists we should throw + let found_system = switch(Properties.getClassProperty(metadata, "__system")){ + case(null){}; + case(?found){ + return #err(Types.errors(#attempt_to_stage_system_data, "stage_nft_origyn - find system", ?caller)); + } + }; + + var found_metadata : CandyTypes.CandyValue = #Empty; + //try to find existing metadata + switch(Map.get(state.state.nft_metadata, Map.thash, id_val)){ + case(null){ + //D.print("Does not exist yet"); + //does not exist yet; + //add status "staged" + found_metadata := #Class(switch(Properties.updateProperties(Conversions.valueToProperties(metadata), [{name = "__system"; mode=#Set(#Class([{name="status"; value=#Text(Types.nft_status_staged); immutable = false}]))}])){ + case(#err(errType)){ + return #err(Types.errors(#update_class_error, "stage_nft_origyn - set staged status", ?caller)); + }; + case(#ok(result)){ + result; + } + }); + debug if(debug_channel.stage) D.print("we should have status now"); + debug if(debug_channel.stage) D.print(debug_show(found_metadata)); + + + switch(handle_library(state, id_val ,found_metadata, caller)){ + case(#err(err)){ + return #err(err); + }; + case(#ok(ok)){}; + }; + + Map.set(state.state.nft_metadata, Map.thash, id_val, found_metadata); + }; + + case(?this_metadata){ + //exists + debug if(debug_channel.stage) D.print("exists"); + //check to see if it is minted yet.Array + let system_node : CandyTypes.CandyValue = switch(Properties.getClassProperty(this_metadata, "__system")){ + case(null){return #err(Types.errors(#cannot_find_status_in_metadata, "stage_nft_origyn - find system", ?caller));}; + case(?found){found.value}; + }; + + let status : Text = Conversions.valueToText( + switch(Properties.getClassProperty(system_node, "status")){ + case(null){return #err(Types.errors(#cannot_find_status_in_metadata, "stage_nft_origyn - find status", ?caller));}; + 
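+ // Re-staging an existing token is only permitted while it is still un-minted;
+ // the Metadata.is_minted check below rejects replacement of minted metadata.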
case(?found){found.value}; + }); + + + //nyi: limit to immutable items after mint + if(Metadata.is_minted(this_metadata) == false){ + //if(1 == 1){ + //todo + //pull __system vars + debug if(debug_channel.stage) D.print("dealing with 1==1"); + switch(Properties.getClassProperty(this_metadata, "__system")){ + case(null){ + //this branch may be an error + + return #err(Types.errors(#improper_interface, "stage_nft_origyn - __system node not found", ?caller)); + + + }; + case(?found){ + //inject __system vars into new metadata + debug if(debug_channel.stage) D.print("updating metadata to include system"); + found_metadata := #Class(switch(Properties.updateProperties(Conversions.valueToProperties(metadata), [{name = "__system"; mode=#Set(found.value)}])){ + case(#err(errType)){ + return #err(Types.errors(#update_class_error, "stage_nft_origyn - set staged status", ?caller)); + }; + case(#ok(result)){ + result; + } + }); + }; + }; + + switch(handle_library(state,id_val, found_metadata, caller)){ + case(#err(err)){ + return #err(err); + }; + case(#ok(ok)){}; + }; + + + + //swap metadata + Map.set(state.state.nft_metadata, Map.thash, id_val, found_metadata); + return #ok(id_val); + } else { + return #err(Types.errors(#cannot_restage_minted_token, "stage_nft_origyn - cannot replace minted token", ?caller)) + }; + }; + }; + return #ok(id_val); + }; + + + + public func stage_library_nft_origyn( + state : Types.State, + chunk : Types.StageChunkArg, + caller : Principal) : Result.Result { + if(NFTUtils.is_owner_manager_network(state,caller) == false){return #err(Types.errors(#unauthorized_access, "stage_library_nft_origyn - not an owner", ?caller))}; + debug if(debug_channel.stage) D.print("in stage_library_nft_origyn" # debug_show(chunk)); + var b_updated_meta = false; + var metadata = switch(Metadata.get_metadata_for_token(state, chunk.token_id, caller, ?state.canister(), state.state.collection_data.owner)){ + case(#err(err)){ + return #err(Types.errors(err.error, "stage_library_nft_origyn " # err.flag_point, ?caller)); + }; + case(#ok(val)){ + val; + }; + }; + + let library_meta = switch(Metadata.get_library_meta(metadata, chunk.library_id)){ + case(#ok(found)){ + found; + }; + case(#err(err)){ + chunk.filedata; + }; + }; + + debug if(debug_channel.stage) D.print("found library meta" # debug_show(library_meta)); + + + switch(chunk.filedata){ + case(#Class(val)){ + debug if(debug_channel.stage) D.print("checking filedata" # debug_show(chunk.filedata)); + //add it + //confirm library_id + let library_id = switch(Properties.getClassProperty(chunk.filedata, Types.metadata.library_id)){ + case(null){ + debug if(debug_channel.stage) D.print("library not found"); + return #err(Types.errors(#library_not_found, "stage_nft_origyn - provided filedata must be a claass with library_id attribute", ?caller)); + + }; + case(?id){ + switch(id.value){ + case(#Text(id)){ + id; + }; + case(_){ + return #err(Types.errors(#library_not_found, "stage_library_nft_origyn - provided filedata must be a claass with library_id as #Text attribute", ?caller)); + + }; + } + }; + }; + + + + + + debug if(debug_channel.stage) D.print("new_library - " # library_id); + let new_library = Buffer.Buffer(1); + var b_found = false; + + debug if(debug_channel.stage) D.print("rebuilding" # debug_show(Metadata.get_nft_library(metadata, ?caller))); + + let library = switch(Metadata.get_nft_library(metadata, ?caller)){ + case(#err(err)){ + return #err(Types.errors(#library_not_found, "stage_library_nft_origyn - cannot find library" # 
err.flag_point, ?caller)); + + }; + case(#ok(val)){val}; + }; + + debug if(debug_channel.stage) D.print("current library " # debug_show(library)); + + + label rebuild for(this_item in Conversions.valueToValueArray(library).vals()){ + debug if(debug_channel.stage) D.print("handling rebuild for " # debug_show(this_item)); + switch(Properties.getClassProperty(this_item, Types.metadata.library_id)){ + case(null){ + //shouldn't be here + //D.print("shouldnt be here"); + }; + case(?id){ + debug if(debug_channel.stage) D.print(debug_show((id, library_id))); + if(Conversions.valueToText(id.value) == library_id){ + + debug if(debug_channel.stage) D.print("replaceing with filechunk"); + + new_library.add(chunk.filedata); + b_found := true; + + + //break rebuild; + }else{ + debug if(debug_channel.stage) D.print("keeping library"); + new_library.add(this_item); + }; + }; + + }; + }; + + debug if(debug_channel.stage) D.print("did we find it?" # debug_show(b_found)); + + if(b_found == false){ + new_library.add(chunk.filedata); + }; + + + var found_metadata = #Class(switch(Properties.updateProperties(Conversions.valueToProperties(metadata), [{name = Types.metadata.library; mode=#Set(#Array(#thawed(new_library.toArray())))}])){ + case(#err(errType)){ + + switch(errType){ + /* + //info: This is an upgrade for mintpass. remove once deployed - don't mark something immutable that you don't want immutable + case(#Immutable){ + var newMetadata = Buffer.Buffer(1); + for(thisCurrentItem in Conversions.valueToProperties(metadata).vals()){ + if(thisCurrentItem.name == Types.metadata.library){ + newMetadata.add({ + name=Types.metadata.library; + value = #Array(#thawed(new_library.toArray())); + immutable = false; + }); + } else { + newMetadata.add(thisCurrentItem); + }; + }; + + newMetadata.toArray(); + }; */ + case(_){ + return #err(Types.errors(#update_class_error, "stage_library_nft_origyn - cannot update" # debug_show(errType), ?caller)); + + }; + }; + }; + case(#ok(result)){ + result; + }; + }); + + debug if(debug_channel.stage) D.print("new metadata is " # debug_show(found_metadata)); + + + + + metadata := found_metadata; + + debug if(debug_channel.stage) D.print("handling library"); + switch(handle_library(state, chunk.token_id , metadata, caller)){ + case(#err(err)){ + return #err(err); + }; + case(#ok(ok)){}; + }; + + b_updated_meta := true; + + + }; + case(#Empty){ + //do nothing? 
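+ // #Empty filedata means the caller is only streaming chunk bytes for a library whose
+ // metadata was staged earlier, so the token metadata is left untouched here.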
+ }; + case(_){ + return #err(Types.errors(#library_not_found, "stage_library_nft_origyn - cannot find library id in metadata - " # chunk.token_id # " library_id: " # chunk.library_id, ?caller)); + }; + }; + + debug if(debug_channel.stage) D.print("checking allocation" # debug_show((chunk.token_id, chunk.library_id))); + + + //swap metadata + debug if(debug_channel.stage) D.print("is metadata updated " # debug_show(b_updated_meta)); + if(b_updated_meta){ + Map.set(state.state.nft_metadata, Map.thash, chunk.token_id, metadata); + }; + + if(chunk.content.size() > 0){ + //make sure we have an allocation for space for this chunk + let allocation = switch(Map.get<(Text, Text), Types.AllocationRecord>(state.state.allocations, (NFTUtils.library_hash, NFTUtils.library_equal), (chunk.token_id, chunk.library_id))){ + case(null){return #err(Types.errors(#not_enough_storage, "stage_library_nft_origyn - allocation not found for " # chunk.token_id # " " # chunk.library_id, ?caller));}; + case(?val)(val); + }; + + debug if(debug_channel.stage) D.print("found allocation " # debug_show(allocation)); + + if( allocation.canister == state.canister()){ + //the chunk goes on this canister + + debug if(debug_channel.stage) D.print("looking for workspace"); + var found_workspace : CandyTypes.Workspace = + switch(state.nft_library.get(chunk.token_id)){ + case(null){ + //chunk doesn't exist; + debug if(debug_channel.stage) D.print("does not exist"); + let new_workspace = Workspace.initWorkspace(2); + debug if(debug_channel.stage) D.print("puting Zone"); + debug if(debug_channel.stage) D.print(debug_show(chunk.filedata)); + + if(chunk.content.size() > allocation.available_space){ + debug if(debug_channel.stage) D.print("not enough storage in allocation null library " # debug_show(chunk.token_id, chunk.library_id, chunk.content.size(),allocation.available_space)); + return #err(Types.errors(#not_enough_storage, "stage_library_nft_origyn - chunk bigger than available" # chunk.token_id # " " # chunk.library_id, ?caller)); + }; + + new_workspace.add(Workspace.initDataZone(CandyTypes.destabalizeValue(chunk.filedata))); + + debug if(debug_channel.stage) D.print("put the zone"); + let new_library = TrieMap.TrieMap(Text.equal,Text.hash); + debug if(debug_channel.stage) D.print("putting workspace"); + new_library.put(chunk.library_id, new_workspace); + debug if(debug_channel.stage) D.print("putting library"); + state.nft_library.put(chunk.token_id, new_library); + new_workspace; + }; + case(?library){ + switch(library.get(chunk.library_id)){ + case(null){ + debug if(debug_channel.stage) D.print("nft exists but not file"); + //nft exists but this file librry entry doesnt exist + //nftdoesn't exist; + if(chunk.content.size() > allocation.available_space){ + debug if(debug_channel.stage) D.print("not enough storage in allocation not null" # debug_show(chunk.token_id, chunk.library_id, chunk.content.size(),allocation.available_space)); + return #err(Types.errors(#not_enough_storage, "stage_library_nft_origyn - chunk bigger than available" # chunk.token_id # " " # chunk.library_id, ?caller)); + }; + let new_workspace = Workspace.initWorkspace(2); + + new_workspace.add(Workspace.initDataZone(CandyTypes.destabalizeValue(chunk.filedata))); + + + library.put(chunk.library_id, new_workspace); + new_workspace; + }; + case(?workspace){ + debug if(debug_channel.stage) D.print("found workspace"); + workspace; + }; + }; + + }; + }; + + //file the chunk + debug if(debug_channel.stage) D.print("filing the chunk"); + let file_chunks = 
switch(found_workspace.getOpt(1)){ + case(null){ + if(found_workspace.size()==0){ + //nyi: should be an error because no filedata + found_workspace.add(Workspace.initDataZone(#Empty)); + }; + if(found_workspace.size()==1){ + found_workspace.add(Buffer.Buffer(0)); + }; + found_workspace.get(1); + }; + case(?dz){ + dz; + }; + }; + + debug if(debug_channel.stage) D.print("do we have chunks"); + if(chunk.chunk + 1 <= SB.size(allocation.chunks)){ + //this chunk already exists in the allocatioin + //see what size it is + debug if(debug_channel.stage) D.print("branch a"); + let current_size = SB.get(allocation.chunks,chunk.chunk); + if(chunk.content.size() > current_size){ + //allocate more space + debug if(debug_channel.stage) D.print("allocate more"); + SB.put(allocation.chunks, chunk.chunk, chunk.content.size()); + allocation.available_space += (chunk.content.size() - current_size); + } else if (chunk.content.size() != current_size){ + //give space back + debug if(debug_channel.stage) D.print("give space back"); + SB.put(allocation.chunks, chunk.chunk, chunk.content.size()); + allocation.available_space -= (current_size - chunk.content.size()); + } else {}; + } else { + //D.print("branch b "); + for(this_index in Iter.range(SB.size(allocation.chunks), chunk.chunk)){ + //D.print(debug_show(this_index)); + if(this_index == chunk.chunk){ + if(chunk.content.size() > allocation.available_space){ + debug if(debug_channel.stage) D.print("not enough storage in allocation not branch b" # debug_show(chunk.token_id, chunk.library_id, chunk.content.size(),allocation.available_space)); + + return #err(Types.errors(#not_enough_storage, "stage_library_nft_origyn - chunk bigger than available past workspace" # chunk.token_id # " " # chunk.library_id, ?caller)); + }; + + debug if(debug_channel.stage) D.print("branch c" # debug_show(allocation, chunk.content.size())); + SB.add(allocation.chunks, chunk.content.size()); + allocation.available_space -= chunk.content.size(); + } else { + //D.print("brac d"); + SB.add(allocation.chunks, 0); + } + }; + }; + + //D.print("putting the chunk"); + if(chunk.chunk + 1 <= file_chunks.size()){ + file_chunks.put(chunk.chunk, #Blob(chunk.content)); + } else { + debug if(debug_channel.stage) D.print("in putting the chunk iter"); + debug if(debug_channel.stage) D.print(debug_show(chunk.chunk)); + //D.print(debug_show(file_chunks.size())); + + for(this_index in Iter.range(file_chunks.size(),chunk.chunk)){ + //D.print(debug_show(this_index)); + if(this_index == chunk.chunk){ + //D.print("index was chunk" # debug_show(this_index)); + file_chunks.add(#Blob(chunk.content)); + } else { + //D.print("index wasnt chunk" # debug_show(this_index)); + file_chunks.add(#Blob(Blob.fromArray([]))); + } + }; + + }; + + //D.print("returning"); + return #ok(#staged(state.canister())); + + } else { + //we need to send this chunk to storage + //D.print("This needs to be filed elsewhere " # debug_show(allocation)); + return #ok(#stage_remote({ + allocation = allocation; + metadata = metadata;})); + }; + } else { + + return #ok(#staged(state.canister())); + }; + }; + + //sends the file chunk to remote storage + public func stage_library_nft_origyn_remote( + state : Types.State, + chunk : Types.StageChunkArg, + allocation : Types.AllocationRecord, + metadata : CandyTypes.CandyValue, + caller : Principal) : async Result.Result { + + + + debug if(debug_channel.remote) D.print("we have an allocationin the remote" # debug_show((allocation, metadata))); + + //we shouldn't need to pre remove the space because 
the allocation was already made + + let storage_actor : Types.StorageService = actor(Principal.toText(allocation.canister)); + let response = await storage_actor.stage_library_nft_origyn(chunk, Types.allocation_record_stabalize(allocation), (if(chunk.chunk == 0){metadata} else {#Empty})); + + debug if(debug_channel.remote) D.print("allocation to remot result" # debug_show(response)); + + switch(response){ + case(#ok(result)){ + //update the allocation + //keep in mind the allocation passed to us is no longer the correct one.Buffer + let refresh_state = state.refresh_state(); + + var fresh_allocation = switch(Map.get<(Text, Text), Types.AllocationRecord>(refresh_state.state.allocations, (NFTUtils.library_hash, NFTUtils.library_equal), (chunk.token_id, chunk.library_id))){ + case(null){return #err(Types.errors(#not_enough_storage, "stage_library_nft_origyn_remote - allocation not found for " # chunk.token_id # " " # chunk.library_id, ?caller));}; + case(?val)(val); + }; + + //make sure we have an allocation for space for this chunk + + + if(chunk.chunk + 1 <= SB.size(fresh_allocation.chunks)){ + //this chunk already exists in the allocation + //see what size it is + let current_size = SB.get(allocation.chunks,chunk.chunk); + if(chunk.content.size() > current_size){ + //allocate more space + + SB.put(fresh_allocation.chunks, chunk.chunk, chunk.content.size()); + fresh_allocation.available_space += (chunk.content.size() - current_size); + } else if(chunk.content.size() != current_size){ + //give space back + SB.put(fresh_allocation.chunks, chunk.chunk, chunk.content.size()); + fresh_allocation.available_space -= (current_size - chunk.content.size()); + } else {}; + } else { + for(this_index in Iter.range(SB.size(fresh_allocation.chunks), chunk.chunk)){ + if(this_index == chunk.chunk){ + SB.add(fresh_allocation.chunks, chunk.content.size()); + fresh_allocation.available_space += chunk.content.size(); + } else { + SB.add(fresh_allocation.chunks, 0); + } + }; + }; + }; + case(#err(err)){ + return #err(err); + }; + }; + return response; + + }; + + //executes the mint and gives owner ship to the specified user + public func execute_mint(state: Types.State, token_id : Text, newOwner : Types.Account, escrow: ?Types.EscrowReceipt, caller : Principal) : Result.Result<(Text, CandyTypes.CandyValue),Types.OrigynError>{ + debug if(debug_channel.mint) D.print("in mint"); + var metadata = switch(Metadata.get_metadata_for_token(state, token_id, caller, ?state.canister(), state.state.collection_data.owner)){ + case(#err(err)){ + return #err(Types.errors(#token_not_found, "execute_mint " # err.flag_point, ?caller)); + }; + case(#ok(val)){ + val; + }; + }; + //cant mint if already minted + if(Metadata.is_minted(metadata)){ + return #err(Types.errors(#item_already_minted, "execute_mint - already minted", ?caller)); + }; + metadata := Metadata.set_system_var(metadata, Types.metadata.__system_status, #Text("minted")); + + //get the royalties + //nyi: should ask the network for the network royalty and node royalty + + + var collection = switch(Metadata.get_metadata_for_token(state, "", caller, ?state.canister(), state.state.collection_data.owner)){ + case(#err(err)){ + #Class([]); + }; + case(#ok(val)){ + val; + }; + }; + + var primary_royalties = switch(Properties.getClassProperty(collection, Types.metadata.primary_royalties_default)){ + case(null){ + #Array(#frozen([])); + }; + case(?val){ + val.value; + }; + }; + + metadata := Metadata.set_system_var(metadata, Types.metadata.__system_primary_royalty, 
primary_royalties); + + var secondary_royalties = switch(Properties.getClassProperty(collection, Types.metadata.secondary_royalties_default)){ + case(null){ + #Array(#frozen([])); + }; + case(?val){ + val.value; + }; + }; + + metadata := Metadata.set_system_var(metadata, Types.metadata.__system_secondary_royalty, secondary_royalties); + + + var node_principal = switch(Properties.getClassProperty(collection, Types.metadata.__system_node)){ + case(null){ + #Principal(Principal.fromText("yfhhd-7eebr-axyvl-35zkt-z6mp7-hnz7a-xuiux-wo5jf-rslf7-65cqd-cae")); //dev fund + }; + case(?val){ + val.value; + }; + }; + + metadata := Metadata.set_system_var(metadata, Types.metadata.__system_node, node_principal); + + var originator_principal = switch(Properties.getClassProperty(collection, Types.metadata.__system_originator)){ + case(null){ + #Principal(Principal.fromText("yfhhd-7eebr-axyvl-35zkt-z6mp7-hnz7a-xuiux-wo5jf-rslf7-65cqd-cae")); //dev fund + }; + case(?val){ + val.value; + }; + }; + + metadata := Metadata.set_system_var(metadata, Types.metadata.__system_originator, originator_principal); + + //set new owner + metadata := switch(Properties.updateProperties(Conversions.valueToProperties(metadata), [ + { + name = Types.metadata.owner; + mode = #Set(switch(newOwner){ + case(#principal(newOwner)){#Principal(newOwner);}; + case(#account_id(newOwner)){#Text(newOwner);}; + case(#extensible(newOwner)){newOwner;}; + case(#account(buyer)){#Array(#frozen([#Principal(buyer.owner), #Option(switch(buyer.sub_account){case(null){null}; case(?val){?#Blob(val);}})]))}; + }); + } + ])){ + case(#ok(props)){ + #Class(props); + }; + case(#err(err)){ + //maybe the owner is immutable + switch(Metadata.is_nft_owner(metadata, newOwner)){ + case(#err(err)){ + return #err(Types.errors(#token_not_found, "mint_nft_origyn retrieve owner " # err.flag_point, ?caller)); + }; + case(#ok(val)){ + + if(val == false){ + //tried to set an immutable owner; + return #err(Types.errors(#update_class_error, "mint_nft_origyn - error setting owner " # token_id, ?caller)); + }; + //owner will be left the same as the immutable + metadata; + }; + }; + }; + }; + + + + //need to add the mint transaction record here + let txn_record = switch(Metadata.add_transaction_record(state ,{ + token_id = token_id; + index = 0; //mint should always be 0 + txn_type = #mint({ + from = #principal(caller); + to = newOwner; + sale = switch(escrow){ + case(null){null}; + case(?val){ + + ?{token= val.token; + amount = val.amount;} + + }; + }; + extensible = #Empty; + }); + timestamp = Time.now(); + chain_hash = []; + }, caller)){ + case(#err(err)){ + //potentially big error once certified data is in place...may need to throw + return #err(Types.errors(err.error, "mint_nft_origyn add_transaction_record" # err.flag_point, ?caller)); + }; + case(#ok(val)){val}; + }; + + Map.set(state.state.nft_metadata, Map.thash, token_id, metadata); + + + + return #ok((token_id, metadata)); + }; + + +} \ No newline at end of file diff --git a/src/origyn_nft_reference/owner.mo b/src/origyn_nft_reference/owner.mo new file mode 100644 index 0000000..af05f8c --- /dev/null +++ b/src/origyn_nft_reference/owner.mo @@ -0,0 +1,362 @@ +import Types "types"; +import Result "mo:base/Result"; +import CandyTypes "mo:candy_0_1_10/types"; +import Iter "mo:base/Iter"; +import Option "mo:base/Option"; +import Principal "mo:base/Principal"; +import Text "mo:base/Text"; +import Properties "mo:candy_0_1_10/properties"; +import Workspace "mo:candy_0_1_10/workspace"; +import Metadata "metadata"; +import 
Market "market"; +import TrieMap "mo:base/TrieMap"; +import Conversions "mo:candy_0_1_10/conversion"; +import D "mo:base/Debug"; +import Buffer "mo:base/Buffer"; +import Time "mo:base/Time"; +import NFTUtils "utils"; +import DIP721 "DIP721"; +import EXT "mo:ext/Core"; +import MigrationTypes "./migrations/types"; + + +module { + + type StateAccess = Types.State; + let Map = MigrationTypes.Current.Map; + + let debug_channel = { + owner = false; + + }; + + public func share_wallet_nft_origyn(state: StateAccess, request : Types.ShareWalletRequest, caller : Principal) : Result.Result { + //nyi: this should only be used by an owner to transfer between wallets that they own. to protect this, any assets in the canister associated with the account/principal + //should be moved along with the the token + + //nyi: transfers from one accountid to another must be from the same principal.Array + //to transfer from accountId they must be in the null subaccount + + var metadata = switch(Metadata.get_metadata_for_token(state, request.token_id, caller, ?state.canister(), state.state.collection_data.owner)){ + case(#err(err)){ + return #err(Types.errors(#token_not_found, "share_nft_origyn token not found" # err.flag_point, ?caller)); + }; + case(#ok(val)){ + val; + }; + }; + + //can't owner transfer if token is soulbound + if (Metadata.is_soulbound(metadata)) { + return #err(Types.errors(#token_non_transferable, "share_nft_origyn ", ?caller)); + }; + + let owner = switch(Metadata.get_nft_owner(metadata)){ + case(#err(err)){ + return #err(Types.errors(err.error, "share_nft_origyn " # err.flag_point, ?caller)); + }; + case(#ok(val)){ + val; + }; + }; + + + + if(Types.account_eq(owner, #principal(caller)) == false){ + //cant transfer something you dont own; + debug if(debug_channel.owner) D.print("should be returning item not owned"); + return #err(Types.errors(#item_not_owned, "share_nft_origyn cannot transfer item from does not own", ?caller)); + }; + + + //look for an existing sale + switch(Market.is_token_on_sale(state, metadata, caller)){ + case(#err(err)){return #err(Types.errors(err.error, "share_nft_origyn ensure_no_sale " # err.flag_point, ?caller))}; + case(#ok(val)){ + if(val == true){ + return #err(Types.errors(#existing_sale_found, "share_nft_origyn - sale exists " # request.token_id , ?caller)); + }; + }; + + }; + + + debug if(debug_channel.owner) D.print(debug_show(owner)); + debug if(debug_channel.owner) D.print(debug_show(request.from)); + if(Types.account_eq(owner, request.from) == false){ + //cant transfer something you dont own; + debug if(debug_channel.owner) D.print("should be returning item not owned"); + return #err(Types.errors(#item_not_owned, "share_nft_origyn cannot transfer item from does not own", ?caller)); + }; + + //set new owner + //D.print("Setting new Owner"); + metadata := switch(Properties.updateProperties(Conversions.valueToProperties(metadata), [ + { + name = Types.metadata.owner; + mode = #Set(Metadata.account_to_candy(request.to)); + } + ])){ + case(#ok(props)){ + #Class(props); + }; + case(#err(err)){ + //maybe the owner is immutable + + return #err(Types.errors(#update_class_error, "share_nft_origyn - error setting owner " # request.token_id, ?caller)); + + }; + }; + + let wallets = Buffer.Buffer(1); + //add the wallet share + switch(Metadata.get_system_var(metadata, Types.metadata.__system_wallet_shares)){ + case(#Empty){}; + case(#Array(#thawed(val))){ + let result = Map.new(); + for(thisItem in val.vals()){ + wallets.add(thisItem); + }; + }; + 
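+ // Frozen wallet-share arrays are copied the same way; after this switch the previous
+ // owner is appended and the combined list is written back as a frozen array under
+ // __system_wallet_shares.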
case(#Array(#frozen(val))){ + for(thisItem in val.vals()){ + wallets.add(thisItem); + }; + }; + case(_){ + return #err(Types.errors(#improper_interface, "share_nft_origyn - wallet_share not an array", null)); + }; + }; + + wallets.add(Metadata.account_to_candy(owner)); + + metadata := Metadata.set_system_var(metadata, Types.metadata.__system_wallet_shares, #Array(#frozen(wallets.toArray()))); + + + debug if(debug_channel.owner) D.print("updating metadata"); + Map.set(state.state.nft_metadata, Map.thash, request.token_id, metadata); + + //D.print("Adding transaction"); + let txn_record = switch(Metadata.add_transaction_record(state, { + token_id = request.token_id; + index = 0; //mint should always be 0 + txn_type = #owner_transfer({ + from = request.from; + to = request.to; + extensible = #Empty; + }); + timestamp = Time.now(); + chain_hash = []; + }, caller)){ + case(#err(err)){ + //potentially big error once certified data is in place...may need to throw + return #err(Types.errors(err.error, "share_nft_origyn add_transaction_record" # err.flag_point, ?caller)); + }; + case(#ok(val)){val}; + }; + + //D.print("returning transaction"); + #ok({ + transaction =txn_record; + assets= []}); + }; + + + public func transferDip721(state: StateAccess, from: Principal, to: Principal, tokenAsNat: Nat, caller: Principal) : async DIP721.Result{ + //nyi: this should be refactored to use market_transfer_nft_origyn where we look for an escrow from one user to the other and use the full escrow for the transfer + //if the escrow doesn't exist then we should fail + + + //warning: Do not use this to transfer from one owner to another, you will eventually lose valuble assets. use market_transfer_nft_origyn + //nyi: determine if this is a marketable NFT and take proper action + //marketable NFT may not be transfered between owner wallets execpt through share_nft_origyn + let token_id = NFTUtils.get_nat_as_token_id(tokenAsNat); + + let escrows = switch(Market.find_escrow_reciept(state, #principal(to), #principal(from), token_id)){ + case(#ok(val)){val}; + case(#err(err)){ + return #Err(#Other("escrow required for DIP721 transfer - failure of DIP721 transferFrom " # err.flag_point)); + }; + }; + + if(Map.size(escrows) == 0 ){ + return #Err(#Other("escrow required for DIP721 transfer - failure of DIP721 transferFrom")); + }; + + //dip721 is not discerning. 
If it finds a first asset it will use that for the transfer + let first_asset = Iter.toArray(Map.entries(escrows))[0]; + + if(first_asset.1.sale_id != null){ + return #Err(#Other("escrow required for DIP721 transfer - failure of DIP721 transferFrom due to sale_id in escrow reciept" # debug_show(first_asset))); + }; + + let result = await Market.market_transfer_nft_origyn_async(state, { + token_id = token_id; + sales_config = + { + escrow_receipt = ?first_asset.1; + pricing = #instant; + broker_id = null; + }; + }, from); + + + + switch(result){ + case(#ok(data)){ + return #Ok(data.index); + }; + case(#err(err)){ + + return #Err(#Other("failure of DIP721 transferFrom " # err.flag_point)); + + }; + }; + }; + + public func transferExt(state: StateAccess, request: EXT.TransferRequest, caller : Principal) : async EXT.TransferResponse { + //todo: this should be refactored to use market_transfer_nft_origyn where we look for an escrow from one user to the other and use the full escrow for the transfer + //if the escrow doesn't exist then we should fail + + if(Types.account_eq(#principal(caller), switch(request.from){ + case(#principal(data)){ + #principal(data); + }; + case(#address(data)){ + #account_id(data); + };}) == false ){ + + return #err(#Other("unauthorized caller must be the from address" # debug_show(request))); + + }; + + switch(getNFTForTokenIdentifier(state, request.token)){ + case(#ok(data)){ + + let escrows = switch(Market.find_escrow_reciept(state, switch(request.from){ + case(#principal(data)){ + #principal(data); + }; + case(#address(data)){ + #account_id(data); + }; + /* case(_){ + return #err(#Other("accountID extensible not implemented in EXT transfer from")); + }; */ + }, switch(request.from){ + case(#principal(data)){ + #principal(data); + }; + case(#address(data)){ + #account_id(data); + }; + /* case(_){ + return #err(#Other("accountID extensible not implemented in EXT transfer from")); + }; */ + }, data)){ + case(#ok(val)){val}; + case(#err(err)){ + return #err(#Other("escrow required for EXT transfer - failure of EXT tranfer " # err.flag_point)); + }; + }; + + if(Map.size(escrows) == 0 ){ + return #err(#Other("escrow required of EXT tranfer transfer - failure of EXT tranfer")); + }; + + //dip721 is not discerning. 
If it finds a first asset it will use that for the transfer + let first_asset = Iter.toArray(Map.entries(escrows))[0]; + + if(first_asset.1.sale_id != null){ + return #err(#Other("escrow required of EXT tranfer transfer - failure of EXT tranfer due to sale_id in escrow reciept" # debug_show(first_asset))); + }; + + let result = await Market.market_transfer_nft_origyn_async(state, { + token_id = data; + sales_config = + { + escrow_receipt = ?first_asset.1; + pricing = #instant; + broker_id = null; + }; + }, caller); + + switch(result){ + case(#ok(data)){ + return #ok(data.index); + }; + case(#err(err)){ + + return #err(#Other("failure of EXT tranfer " # err.flag_point)); + + }; + }; + }; + case(#err(err)){ + return #err(#InvalidToken(request.token)); + }; + }; + }; + + public func getNFTForTokenIdentifier(state: StateAccess, token: EXT.TokenIdentifier) : Result.Result { + + for(this_nft in Map.entries(state.state.nft_metadata)){ + switch(Metadata.get_nft_id(this_nft.1)){ + case(#ok(data)){ + + if(Text.hash(data) == EXT.TokenIdentifier.getIndex(token) ){ + return #ok(data); + }; + }; + case(_){}; + }; + + }; + return #err(Types.errors(#token_not_found, "getNFTForTokenIdentifier", null)); + }; + + public func bearerEXT(state: StateAccess, tokenIdentifier: EXT.TokenIdentifier, caller :Principal) : Result.Result{ + + switch(getNFTForTokenIdentifier(state, tokenIdentifier)){ + case(#ok(data)){ + switch(Metadata.get_nft_owner( + switch(Metadata.get_metadata_for_token(state, + data + , caller, null, state.state.collection_data.owner)){ + case(#err(err)){ + return #err(#Other("Token not found")); + }; + case(#ok(val)){ + val; + }; + })){ + case(#err(err)){ + return #err(#Other("ownerOf " # err.flag_point)); + }; + case(#ok(val)){ + switch(val){ + case(#principal(data)){ + return #ok(EXT.User.toAID(#principal(data))); + }; + case(#account_id(data)){ + return #ok(data); + }; + case(_){ + return #err(#Other("ownerOf unsupported owner type by DIP721" # debug_show(val))); + }; + }; + + }; + }; + }; + case(#err(err)){ + return #err(#InvalidToken(tokenIdentifier)); + }; + }; + + }; + + + + +} \ No newline at end of file diff --git a/src/origyn_nft_reference/phone_book.mo b/src/origyn_nft_reference/phone_book.mo new file mode 100644 index 0000000..3fa5a1b --- /dev/null +++ b/src/origyn_nft_reference/phone_book.mo @@ -0,0 +1,91 @@ +import Buffer "mo:base/Buffer"; +import Error "mo:base/Error"; +import Iter "mo:base/Iter"; +import Map "mo:base/TrieMap"; +import Nat "mo:base/Nat"; +import Text "mo:base/Text"; +//this simple phone book canister is a stand in for a more robust +//service required for the exOS + +shared (deployer) actor class PhoneBook(admin : Principal) = this { + + type Name = Text; + type Canister = Principal; + type Canisters = [Canister]; + + + type Entry = { + collection: Text; + canisters: Canisters; + }; + + stable var phonebook_stable : [(Name, Canisters)] = []; + let phonebook = Map.fromEntries(phonebook_stable.vals(), Text.equal, Text.hash); + + stable var admins_stable : Canisters = [admin]; + + + private func isAdmin(caller: Principal) : Bool { + for(this_item in admins_stable.vals()){ + if (this_item == caller){ + return true; + } + }; + return false; + }; + + public shared (msg) func insert(name : Name, entry : Canisters): async ?Canisters { + if(isAdmin(msg.caller) == false){throw(Error.reject("Not an admin"));}; + phonebook.put(name, entry); + return phonebook.get(name); + }; + + public shared (msg) func list(skip : ?Nat, take : ?Nat): async [(Name,Canisters)] { + 
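+ // Admin-only listing. The skip/take paging parameters are accepted but not yet applied;
+ // the entire phone book is returned.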
if(isAdmin(msg.caller) == false){throw(Error.reject("Not an admin"));}; + let results = Buffer.Buffer<(Name, Canisters)>(phonebook.size()); + for(this_item in phonebook.entries()){ + results.add((this_item.0, this_item.1)); + }; + return results.toArray(); + }; + + public shared (msg) func delete(name : Name): async ?Canisters { + if(isAdmin(msg.caller) == false){throw(Error.reject("Not an admin"));}; + phonebook.delete(name); + return phonebook.get(name); + }; + + public shared (msg) func update_admin(admins : Canisters): async Canisters { + if(isAdmin(msg.caller) == false){throw(Error.reject("Not an admin"));}; + admins_stable := admins; + return admins_stable; + }; + + public query func lookup(name : Name) : async ?[Canister] { + phonebook.get(name) + }; + + public query func reverse_lookup(value : Canister) : async (Name) { + var name : Name = ""; + label search for(element in phonebook.entries()){ + var array_canisters = element.1; + label searchVal for(val in array_canisters.vals()){ + if(val == value){ + name := element.0; + break searchVal; + }; + }; + }; + return name; + }; + + system func preupgrade() { + phonebook_stable := Iter.toArray(phonebook.entries()); + + }; + + system func postupgrade() { + phonebook_stable := []; + + }; +}; diff --git a/src/origyn_nft_reference/storage_canister.mo b/src/origyn_nft_reference/storage_canister.mo new file mode 100644 index 0000000..82a864a --- /dev/null +++ b/src/origyn_nft_reference/storage_canister.mo @@ -0,0 +1,369 @@ +import Array "mo:base/Array"; +import Blob "mo:base/Blob"; +import Buffer "mo:base/Buffer"; +import Candy "mo:candy_0_1_10/types"; +import CandyTypes "mo:candy_0_1_10/types"; +import Conversions "mo:candy_0_1_10/conversion"; +import Cycles "mo:base/ExperimentalCycles"; +import D "mo:base/Debug"; +import DIP721 "DIP721"; +import EXT "mo:ext/Core"; +import Error "mo:base/Error"; +import Iter "mo:base/Iter"; +import Metadata "metadata"; +import Mint "mint"; +import NFTUtils "utils"; +import Nat "mo:base/Nat"; +import Nat32 "mo:base/Nat32"; +import Nat8 "mo:base/Nat8"; +import Principal "mo:base/Principal"; +import Map "mo:map_6_0_0/Map"; +import Result "mo:base/Result"; +import Storage_Store "storage_store"; +import Text "mo:base/Text"; +import Time "mo:base/Time"; +import TrieMap "mo:base/TrieMap"; +import Types "./types"; +import Workspace "mo:candy_0_1_10/workspace"; +import http "storage_http"; +import Migrations "./migrations_storage"; +import MigrationTypes "./migrations_storage/types"; + + +shared (deployer) actor class Storage_Canister(__initargs : Types.StorageInitArgs) = this { + stable var SIZE_CHUNK = 2048000; //max message size + + stable var ic : Types.IC = actor("aaaaa-aa"); + + let debug_channel = { + refresh = false; + }; + + + stable var nft_library_stable : [(Text,[(Text,CandyTypes.AddressedChunkArray)])] = []; + stable var tokens_stable : [(Text, Types.HttpAccess)] = []; + + let initial_storage = switch(__initargs.storage_space){ + case(null){ + SIZE_CHUNK * 500; //default is 1GB + }; + case(?val){ + if(val > SIZE_CHUNK * 1000){ //only 2GB useable in a canister + assert(false); + }; + val; + } + }; + + //initialize types and stable storage + let StateTypes = MigrationTypes.Current; + let SB = StateTypes.SB; + + stable var migration_state : MigrationTypes.State = #v0_0_0(#data); + + migration_state := Migrations.migrate(migration_state, #v0_1_0(#id), { + owner = deployer.caller; + network = __initargs.network; + storage_space = initial_storage; + gateway_canister = __initargs.gateway_canister; + caller = 
deployer.caller ;}); + + // do not forget to change #v0_1_0 when you are adding a new migration + let #v0_1_0(#data(state_current)) = migration_state; + + //the library needs to stay unstable for maleable access to the Buffers that make up the file chunks + private var nft_library : TrieMap.TrieMap> = NFTUtils.build_library(nft_library_stable); + //store access tokens for owner assets to owner specific data + private var tokens : TrieMap.TrieMap = TrieMap.fromEntries(tokens_stable.vals(), Text.equal, Text.hash); + + private var canister_principal : ?Principal = null; + + + // returns the canister principal + private func get_canister(): Principal { + switch(canister_principal){ + case(null){ + canister_principal := ?Principal.fromActor(this); + Principal.fromActor(this); + }; + case(?val){ + val; + } + } + }; + + + //builds the state for passing to child modules + let get_state : () -> Types.StorageState = func (){ + { + var state = state_current; + var nft_library = nft_library; + get_time = get_time; + canister = get_canister; + refresh_state = get_state; + tokens = tokens; + }; + }; + + + //used for testing + stable var __time_mode : {#test; #standard;} = #standard; + private var __test_time : Int = 0; + + private func get_time() : Int{ + switch(__time_mode){ + case(#standard){return Time.now();}; + case(#test){return __test_time;}; + }; + + }; + + // get current owner of the nft + public query func get_collection_owner_nft_origyn(): async Principal.Principal { + state_current.collection_data.owner; + }; + + // get current manager of the nft + public query func get_collection_managers_nft_origyn(): async [Principal.Principal] { + state_current.collection_data.managers; + }; + + // get current network of the nft + public query func get_collection_network_nft_origynt(): async ?Principal.Principal { + state_current.collection_data.network; + }; + + //stores the chunk for a library + public shared (msg) func stage_library_nft_origyn(chunk : Types.StageChunkArg, allocation: Types.AllocationRecordStable, metadata : CandyTypes.CandyValue) : async Result.Result { + + return await Storage_Store.stage_library_nft_origyn( + get_state(), + chunk, + allocation, + metadata, + msg.caller); + }; + + //when meta data is updated on the gateway it will call this function to make sure the + //the storage contatiner has the same info + public shared (msg) func refresh_metadata_nft_origyn(token_id: Text, metadata: CandyTypes.CandyValue) : async Result.Result{ + + debug if(debug_channel.refresh) D.print("in metadata refresh"); + if(state_current.collection_data.owner != msg.caller){return #err(Types.errors(#unauthorized_access, "refresh_metadata_nft_origyn - storage - not an owner", ?msg.caller))}; + + switch(Map.get(state_current.nft_metadata, Map.thash, token_id)){ + case(null){ + D.print("error"); + return #err(Types.errors(#token_not_found, "refresh_metadata_nft_origyn - storage - cannot find metadata to replace - " # token_id, ?msg.caller)); + + }; + case(_){}; + }; + + debug if(debug_channel.refresh) D.print("in metadata refresh"); + debug if(debug_channel.refresh) D.print("in metadata refresh"); + D.print("putting metadata" # debug_show(metadata)); + Map.set(state_current.nft_metadata, Map.thash, token_id, metadata); + + return #ok(true); + }; + + //used for testing + public shared (msg) func __advance_time(new_time: Int) : async Int { + + if(msg.caller != state_current.collection_data.owner){ + throw Error.reject("not owner"); + }; + __test_time := new_time; + return __test_time; + + }; + + //used for 
testing + public shared (msg) func __set_time_mode(newMode: {#test; #standard;}) : async Bool { + if(msg.caller != state_current.collection_data.owner){ + throw Error.reject("not owner"); + }; + __time_mode := newMode; + return true; + }; + + //get storage info from the container + public query func storage_info_nft_origyn() : async Result.Result{ + return #ok({ + allocated_storage = state_current.canister_allocated_storage; + available_space = state_current.canister_availible_space; + allocations = Iter.toArray(Iter.map(Map.vals<(Text,Text),Types.AllocationRecord>(state_current.allocations),Types.allocation_record_stabalize)); + }); + }; + + //secure storage info from the container + public func storage_info_secure_nft_origyn() : async Result.Result{ + return #ok({ + allocated_storage = state_current.canister_allocated_storage; + available_space = state_current.canister_availible_space; + allocations = Iter.toArray(Iter.map(Map.vals<(Text,Text),Types.AllocationRecord>(state_current.allocations),Types.allocation_record_stabalize)); + }); + }; + + + + private func _chunk_nft_origyn(request : Types.ChunkRequest, caller: Principal) : Result.Result{ + //nyi: we need to check to make sure the chunk is public or caller has rights + + let allocation = switch(Map.get<(Text, Text), Types.AllocationRecord>(state_current.allocations,( NFTUtils.library_hash, NFTUtils.library_equal), (request.token_id, request.library_id))){ + case(null){ + return #err(Types.errors(#library_not_found, "chunk_nft_origyn - allocatio for token, library - " # request.token_id # " " # request.token_id, ?caller)); + }; + case(?val){val}; + }; + + switch(nft_library.get(request.token_id)){ + case(null){ + return #err(Types.errors(#token_not_found, "chunk_nft_origyn - cannot find token id - " # request.token_id, ?caller)); + }; + case(?token){ + switch(token.get(request.library_id)){ + case(null){ + return #err(Types.errors(#library_not_found, "chunk_nft_origyn - cannot find library id: token_id - " # request.token_id # " library_id - " # request.library_id, ?caller)); + }; + case(?item){ + switch(item.getOpt(1)){ + case(null){ + //nofiledata + return #err(Types.errors(#library_not_found, "chunk_nft_origyn - chunk was empty: token_id - " # request.token_id # " library_id - " # request.library_id # " chunk - " # debug_show(request.chunk), ?caller)); + }; + case(?zone){ + //D.print("size of zone"); + //D.print(debug_show(zone.size())); + let requested_chunk = switch(request.chunk){ + case(null){ + //just want the allocation + return #ok(#chunk({ + content = Blob.fromArray([]); + total_chunks = zone.size(); + current_chunk = request.chunk; + storage_allocation = Types.allocation_record_stabalize(allocation); + })); + + }; + case(?val){val}; + }; + switch(zone.getOpt(requested_chunk)){ + case(null){ + return #err(Types.errors(#library_not_found, "chunk_nft_origyn - cannot find chunk id: token_id - " # request.token_id # " library_id - " # request.library_id # " chunk - " # debug_show(request.chunk), ?caller)); + }; + case(?chunk){ + switch(chunk){ + case(#Bytes(wval)){ + switch(wval){ + case(#thawed(val)){ + return #ok(#chunk({ + content = Blob.fromArray(val.toArray()); + total_chunks = zone.size(); + current_chunk = request.chunk; + storage_allocation = Types.allocation_record_stabalize(allocation); + })); + }; + case(#frozen(val)){ + return #ok(#chunk({ + content = Blob.fromArray(val); + total_chunks = zone.size(); + current_chunk = request.chunk; + storage_allocation = Types.allocation_record_stabalize(allocation); + })); + } + 
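+ // Both thawed and frozen byte zones are flattened to a Blob before being returned;
+ // #Blob zones below are passed through unchanged.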
}; + }; + + case(#Blob(wval)){ + + return #ok(#chunk({ + content = wval; + total_chunks = zone.size(); + current_chunk = request.chunk; + storage_allocation = Types.allocation_record_stabalize(allocation); + })); + + }; + case(_){ + return #err(Types.errors(#content_not_deserializable, "chunk_nft_origyn - chunk did not deserialize: token_id - " # request.token_id # " library_id - " # request.library_id # " chunk - " # debug_show(request.chunk), ?caller)); + }; + } + }; + }; + }; + }; + }; + + }; + + }; + }; + return #err(Types.errors(#nyi, "chunk_nft_origyn - nyi", ?caller)); + }; + + //gets a chunk for a library + public query (msg) func chunk_nft_origyn(request : Types.ChunkRequest) : async Result.Result{ + + return _chunk_nft_origyn(request, msg.caller); + }; + + //gets a chunk for a library + public shared (msg) func chunk_secure_nft_origyn(request : Types.ChunkRequest) : async Result.Result{ + //warning: test this, it may change the caller to the local canister + return _chunk_nft_origyn(request, msg.caller); + }; + + public query(msg) func http_request(rawReq: Types.HttpRequest): async (http.HTTPResponse) { + return http.http_request(get_state(), rawReq, msg.caller); + }; + + // A streaming callback based on NFTs. Returns {[], null} if the token can not be found. + // Expects a key of the following pattern: "nft/{key}". + public query func nftStreamingCallback(tk : http.StreamingCallbackToken) : async http.StreamingCallbackResponse { + //D.print("The nftstreamingCallback"); + //D.print(debug_show(tk)); + + return http.nftStreamingCallback(tk, get_state()); + }; + + public query func http_request_streaming_callback( + tk : http.StreamingCallbackToken + ) : async http.StreamingCallbackResponse { + return http.http_request_streaming_callback(tk, get_state()); + }; + + public query (msg) func whoami(): async (Principal) { msg.caller }; + + public shared func canister_status(request: { canister_id: Types.canister_id }): async Types.canister_status { + await ic.canister_status(request) + }; + + public query func cycles(): async Nat { + Cycles.balance() + }; + + system func preupgrade() { + + + tokens_stable := Iter.toArray(tokens.entries()); + + let nft_library_stable_buffer = Buffer.Buffer<(Text, [(Text, CandyTypes.AddressedChunkArray)])>(nft_library.size()); + for(thisKey in nft_library.entries()){ + let this_library_buffer : Buffer.Buffer<(Text, CandyTypes.AddressedChunkArray)> = Buffer.Buffer<(Text, CandyTypes.AddressedChunkArray)>(thisKey.1.size()); + for(this_item in thisKey.1.entries()){ + this_library_buffer.add((this_item.0, Workspace.workspaceToAddressedChunkArray(this_item.1)) ); + }; + nft_library_stable_buffer.add((thisKey.0, this_library_buffer.toArray())); + }; + + nft_library_stable := nft_library_stable_buffer.toArray(); + + }; + + system func postupgrade() { + nft_library_stable := []; + tokens_stable := []; + }; +}; diff --git a/src/origyn_nft_reference/storage_http.mo b/src/origyn_nft_reference/storage_http.mo new file mode 100644 index 0000000..b79e074 --- /dev/null +++ b/src/origyn_nft_reference/storage_http.mo @@ -0,0 +1,1452 @@ +import http "mo:http/Http"; +import httpparser "mo:httpparser/lib"; +import CandyTypes "mo:candy_0_1_10/types"; +import Conversion "mo:candy_0_1_10/conversion"; +import Properties "mo:candy_0_1_10/properties"; +import Principal "mo:base/Principal"; +import D "mo:base/Debug"; +import Nat "mo:base/Nat"; +import Array "mo:base/Array"; +import Blob "mo:base/Blob"; +import Buffer "mo:base/Buffer"; +import Char "mo:base/Char"; +import Iter 
"mo:base/Iter"; +import List "mo:base/List"; +import Option "mo:base/Option"; +import Result "mo:base/Result"; +import Text "mo:base/Text"; +import Time "mo:base/Time"; +import TrieMap "mo:base/TrieMap"; +import Random "mo:base/Random"; +import Types "types"; +import Metadata "metadata"; +import NFTUtils "utils"; +import Map "mo:map_6_0_0/Map"; + + +//this is a virtual copy of http.mo except that we use Types.StorageState +//and a few clauses around passing requests to storage canisters are removed(because we are already on a storage canister) +module { + + let debug_channel = { + streaming = false; + large_content = false; + library = false; + request = false; + }; + + //the max size of a streaming chunk + private let __MAX_STREAM_CHUNK = 2048000; + + public type HTTPResponse = { + body : Blob; + headers : [http.HeaderField]; + status_code : Nat16; + streaming_strategy : ?StreamingStrategy; + }; + + public type StreamingStrategy = { + #Callback: { + callback : shared () -> async (); + token : StreamingCallbackToken; + }; + }; + + public type StreamingCallbackToken = { + content_encoding : Text; + index : Nat; + key : Text; + }; + + public type StreamingCallbackResponse = { + body : Blob; + token : ?StreamingCallbackToken; + }; + + public type HeaderField = (Text, Text); + + public type HttpRequest = { + body: Blob; + headers: [HeaderField]; + method: Text; + url: Text; + }; + + // generates a random access key for use with procuring owner's assets + public func gen_access_key(): async Text { + let entropy = await Random.blob(); // get initial entropy + var rand = Text.replace(debug_show(entropy), #text("\\"), ""); + Text.replace(rand, #text("\""), ""); + }; + + //handels stream content with chunk requests + public func handle_stream_content( + state : Types.StorageState, + token_id : Text, + library_id : Text, + start : ?Nat, + end : ?Nat, + contentType : Text, + data : CandyTypes.DataZone, + req : httpparser.ParsedHttpRequest + ) : HTTPResponse { + + + let canister_id: Text = Principal.toText(state.canister()); + let canister = actor (canister_id) : actor { nftStreamingCallback : shared () -> async () }; + + + debug if(debug_channel.streaming) D.print("Handling an range streaming NFT" # debug_show(token_id)); + var size : Nat = 0; + //find the right data zone + for(this_item in data.vals()){ + switch(this_item){ + case(#Bytes(bytes)){ + switch(bytes){ + case(#thawed(aArray)){ + size := size + aArray.size(); + }; + case(#frozen(aArray)){ + size := size + aArray.size(); + }; + }; + }; + case(#Blob(bytes)){ + + size := size + bytes.size(); + + }; + case(_){}; + }; + + }; + + var rEnd = switch(end){ + case(null){size-1 : Nat;}; + case(?v){v}; + }; + + let rStart = switch(start){ + case(null){0;}; + case(?v){v}; + }; + + debug if(debug_channel.streaming)D.print( Nat.toText(rStart) # " - " # Nat.toText(rEnd) # " / " #Nat.toText(size)); + + if(rEnd - rStart : Nat > __MAX_STREAM_CHUNK){ + rEnd := rStart + __MAX_STREAM_CHUNK - 1; + }; + + if(rEnd - rStart : Nat > __MAX_STREAM_CHUNK){ + debug if(debug_channel.streaming) D.print("handling big branch"); + + let cbt = _stream_media(token_id, library_id, rStart, data, rStart, rEnd, size); + + debug if(debug_channel.streaming)D.print("The cbt: " # debug_show(cbt.callback)); + { + //need to use streaming strategy + status_code = 206; + headers = [ + ("Content-Type", contentType), + ("Accept-Ranges", "bytes"), + //("Content-Range", "bytes 0-1/" # Nat.toText(size)), + ("Content-Range", "bytes " # Nat.toText(rStart) # "-" # Nat.toText(rEnd) # "/" # 
Nat.toText(size)), + //("Content-Range", "bytes 0-"# Nat.toText(size-1) # "/" # Nat.toText(size)), + ("Content-Length", Nat.toText(cbt.payload.size())), + ("Cache-Control","private"), + ]; + body = cbt.payload; + streaming_strategy = switch (cbt.callback) { + case (null) { null; }; + case (? tk) { + ?#Callback({ + token = tk; + callback = canister.nftStreamingCallback; + }); + }; + }; + }; + } else { + //just one chunk + debug if(debug_channel.streaming) D.print("returning short array"); + + let cbt = _stream_media(token_id, library_id, rStart, data, rStart, rEnd, size); + + debug if(debug_channel.streaming) D.print("the size " # Nat.toText(cbt.payload.size())); + return { + status_code = 206; + headers = [ + ("Content-Type", contentType), + ("Accept-Ranges", "bytes"), + ("Content-Range", "bytes " # Nat.toText(rStart) # "-" # Nat.toText(rEnd) # "/" # Nat.toText(size)), + //("Content-Range", "bytes 0-"# Nat.toText(size-1) # "/" # Nat.toText(size)), + ("Content-Length", Nat.toText(cbt.payload.size())), + ("Cache-Control","private") + ]; + body = cbt.payload; + streaming_strategy = null; + }; + }; + + + }; + + //handles non-streaming large content + public func handleLargeContent( + state : Types.StorageState, + key : Text, + contentType : Text, + data : CandyTypes.DataZone, + req : httpparser.ParsedHttpRequest + ) : HTTPResponse { + let result = _stream_content(key, 0, data); + + debug if(debug_channel.large_content)D.print("handling large content " # debug_show(result.callback)); + + let canister_id: Text = Principal.toText(state.canister()); + let canister = actor (canister_id) : actor { nftStreamingCallback : shared () -> async () }; + + var b_foundRange : Bool = false; + var start_range : Nat = 0; + var end_range : Nat = 0; + + //nyi: should the data zone cache this? + { + status_code = 200; + headers = [ + ("Content-Type", contentType), + ("accept-ranges", "bytes"), + ("Cache-Control","private"), + ]; + body = result.payload; + streaming_strategy = switch (result.callback) { + case (null) { null; }; + case (? tk) { + ?#Callback({ + token = tk; + callback = canister.nftStreamingCallback; + }); + }; + }; + }; + + }; + + public func _stream_media( + token_id : Text, + library_id :Text, + index : Nat, + data : CandyTypes.DataZone, + rStart : Nat, + rEnd : Nat, + size : Nat, + + ) : { + payload: Blob; // Payload based on the index. + callback: ?StreamingCallbackToken // Callback for next chunk (if applicable). 
+ } { + + debug if(debug_channel.streaming) D.print("in _stream_media"); + debug if(debug_channel.streaming)D.print("token_id " # debug_show(token_id)); + debug if(debug_channel.streaming)D.print("library_id " # debug_show(library_id)); + debug if(debug_channel.streaming)D.print("index " # debug_show(index)); + debug if(debug_channel.streaming)D.print(debug_show(rEnd) # " " # debug_show(rStart) # " "); + + var tracker : Nat = 0; + let buf_size = if(Nat.sub(rEnd,index) >= __MAX_STREAM_CHUNK){ + __MAX_STREAM_CHUNK; + } else { + rEnd - index + 1 : Nat; + }; + + + debug if(debug_channel.streaming)D.print("buffer of size " # debug_show(buf_size)); + let payload : Buffer.Buffer = Buffer.Buffer(buf_size); + var blob_payload = Blob.fromArray([]); + + label getData for(this_item in data.vals()){ + + debug if(debug_channel.streaming) D.print("zone processing" # debug_show(tracker) # "nft-m/" # token_id # "|" # library_id # "|" # Nat.toText(rStart) # "|" # Nat.toText(rEnd) # "|" # Nat.toText(size)); + let chunk = Conversion.valueUnstableToBlob(this_item); + + let chunkSize = chunk.size(); + if(chunkSize + tracker < index){ + debug if(debug_channel.streaming) D.print("skipping chunk"); + tracker += chunkSize; + continue getData; + }; + + debug if(debug_channel.streaming) D.print("current " # debug_show((rStart, rEnd, tracker, chunk.size()))); + + if( + (tracker == rStart) and (tracker + chunk.size() == rEnd + 1) + ){ + debug if(debug_channel.streaming)D.print("matched rstart and rend on whole chunk"); + blob_payload := chunk; + break getData; + }; + + debug if(debug_channel.streaming)D.print("got past the chunk check" # "nft-m/" # token_id # "|" # library_id # "|" # Nat.toText(rStart) # "|" # Nat.toText(rEnd) # "|" # Nat.toText(size)); + debug if(debug_channel.streaming) D.print(debug_show(chunk.size())); + for(this_byte in chunk.vals()){ + debug if(tracker % 1000000 == 0){ + debug if(debug_channel.streaming) D.print(debug_show(tracker % 10000000) # " " # debug_show(tracker) # " " # debug_show(index) # " " # "nft-m/" # token_id # "|" # library_id # "|" # Nat.toText(rStart) # "|" # Nat.toText(rEnd) # "|" # Nat.toText(size)); + }; + if(tracker >= index){ + payload.add(this_byte); + }; + tracker += 1; + if(tracker > rEnd or tracker > Nat.sub(index + __MAX_STREAM_CHUNK, 1)){ + //D.print("broke tracker at " # debug_show(tracker) # " nft-m/" # token_id # "|" # library_id # "|" # Nat.toText(rStart) # "|" # Nat.toText(rEnd) # "|" # Nat.toText(size)); + break getData; + } + }; + }; + //D.print("should have the buffer" # debug_show(payload.size())); + //D.print("tracker: " # Nat.toText(tracker)); + + if(blob_payload.size() == 0){ + blob_payload := Blob.fromArray(payload.toArray()); + }; + + let token = if(tracker >= size or tracker >= rEnd){ + debug if(debug_channel.streaming) D.print("found the end, returning null" # "nft-m/" # token_id # "|" # library_id # "|" # Nat.toText(rStart) # "|" # Nat.toText(rEnd) # "|" # Nat.toText(size)); + null; + } else { + debug if(debug_channel.streaming) D.print("_streaming returning the key " # "nft-m/" # token_id # "|" # library_id # "|" # Nat.toText(rStart) # "|" # Nat.toText(rEnd) # "|" # Nat.toText(size)); + ?{ + content_encoding = "gzip"; + index = tracker; + key = "nft-m/" # token_id # "|" # library_id # "|" # Nat.toText(rStart) # "|" # Nat.toText(rEnd) # "|" # Nat.toText(size); + //key = "nft-m/" # token_id # "|" # library_id # "|" # Nat.toText(tracker) # "|" # Nat.toText(rEnd) # "|" # Nat.toText(size); + } + }; + + {payload = blob_payload; callback=token}; + }; + + 
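+  // Worked example (illustrative only; the sizes and ids below are hypothetical, not real data).
+  // With __MAX_STREAM_CHUNK = 2048000 as declared above, a request carrying "Range: bytes=0-"
+  // for a 5_000_000 byte library item "page" of token "token1" is handled as follows:
+  //  * handle_stream_content derives rStart = 0 and rEnd = 4_999_999, clamps rEnd to 2_047_999,
+  //    and answers with a 206 response whose Content-Range header is "bytes 0-2047999/5000000",
+  //    so the HTTP client simply issues another Range request for the remaining bytes.
+  //  * When a callback token is produced, its key packs the whole request into
+  //    "nft-m/" # token_id # "|" # library_id # "|" # rStart # "|" # rEnd # "|" # size
+  //    (e.g. "nft-m/token1|page|0|2047999|5000000") and its index is the next unread byte offset;
+  //    nftStreamingCallback below splits that key on "/" and "|" and calls stream_media again,
+  //    returning token = null once tracker reaches rEnd or the total size, which ends the stream.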
public func _stream_content( + key : Text, + index : Nat, + data : CandyTypes.DataZone, + ) : { + payload :Blob; // Payload based on the index. + callback: ?StreamingCallbackToken // Callback for next chunk (if applicable). + } { + let payload = data.get(index); + debug if(debug_channel.streaming) D.print("in private call back"); + debug if(debug_channel.streaming)D.print(debug_show(data.size())); + if (index + 1 == data.size()) return {payload = Conversion.valueUnstableToBlob(payload); callback = null}; + debug if(debug_channel.streaming)D.print("returning a new key" # key); + debug if(debug_channel.streaming)D.print(debug_show(key)); + {payload = Conversion.valueUnstableToBlob(payload); + callback = ?{ + content_encoding = "gzip"; + index = index + 1; + key = key; + }}; + }; + + + public func stream_media( + token_id : Text, + library_id : Text, + index : Nat, + data : CandyTypes.DataZone, + rStart : Nat, + rEnd : Nat, + size : Nat + ) : StreamingCallbackResponse { + let result = _stream_media( + token_id, + library_id, + index, + data, + rStart, + rEnd, + size + ); + + debug if(debug_channel.streaming)D.print("the media content"); + debug if(debug_channel.streaming)D.print(debug_show(result)); + { + body = result.payload; + token = result.callback; + }; + }; + + //determines how a library item should be rendere in an http request + public func renderLibrary( + state : Types.StorageState, + req : httpparser.ParsedHttpRequest, + metadata : CandyTypes.CandyValue, + token_id: Text, + library_id: Text) : HTTPResponse { + + debug if(debug_channel.library) D.print("in render library)"); + + let library_meta = switch(Metadata.get_library_meta(metadata, library_id)){ + case(#err(err)){return _not_found("meta not found - " # token_id # " " # library_id);}; + case(#ok(val)){val}; + + + }; + + debug if(debug_channel.library) D.print("library meta" #debug_show(library_meta)); + + let location_type = switch(Metadata.get_nft_text_property(library_meta, "location_type")){ + case(#err(err)){return _not_found("location type not found" # token_id # " " # library_id);}; + case(#ok(val)){val}; + }; + + let read_type = switch(Metadata.get_nft_text_property(library_meta, "read")){ + case(#err(err)){return _not_found("read type not found" # token_id # " " # library_id);}; + case(#ok(val)){val}; + }; + + let location = switch(Metadata.get_nft_text_property(library_meta, "location")){ + case(#err(err)){return _not_found("location type not found" # token_id # " " # library_id);}; + case(#ok(val)){val}; + }; + + let use_token_id = if(location_type == "canister"){ + debug if(debug_channel.library) D.print("location type is canister"); + token_id; + } else if(location_type == "collection"){ + debug if(debug_channel.library) D.print("location type is collection"); + ""; + } else if(location_type == "web"){ + return { + body = ""; + headers = [("Location", location)]; + status_code = 307; + streaming_strategy = null; + }; + + }else { + return _not_found("library hosted off chain - " # token_id # " " # library_id # " " # location_type); + }; + + debug if(debug_channel.library) D.print("comparing library in allocation" # debug_show((use_token_id, library_id, state.state.allocations))); + let allocation = switch(Map.get<(Text, Text), Types.AllocationRecord>(state.state.allocations, (NFTUtils.library_hash, NFTUtils.library_equal), (use_token_id, library_id))){ + case(null){ + return _not_found("allocation for token, library not found - " # use_token_id # " " # library_id); + }; + case(?val){val}; + }; + + debug 
if(debug_channel.library) D.print("found allocation" # debug_show((allocation.canister, state.canister()))); + + + if(read_type == "owner"){ + switch(http_nft_owner_check(state, req, metadata)) { + case(#err(err)) { + return _not_found(err); + }; + case(#ok()) {}; + }; + }; + + if(read_type == "collection_owner"){ + switch(http_owner_check(state, req)) { + case(#err(err)) { + return _not_found(err); + }; + case(#ok()) {}; + }; + }; + + if(location_type == "canister"){ + //on this canister + debug if(debug_channel.library) D.print("canister"); + let content_type = switch(Metadata.get_nft_text_property(library_meta, "content_type")){ + case(#err(err)){return _not_found("content type not found");}; + case(#ok(val)){val}; + }; + + let item = switch(Metadata.get_library_item_from_store(state.nft_library, token_id, library_id)){ + case(#err(err)){return _not_found("item not found")}; + case(#ok(val)){val}; + }; + + switch(item.getOpt(1)){ + case(null){ + //nofiledata + return _not_found("file data not found"); + }; + case(?zone){ + debug if(debug_channel.library) D.print("size of zone" # debug_show(zone.size())); + + var split : [Text] = []; + var split2 : [Text] = []; + var start : ?Nat = null; + var end : ?Nat = null; + var b_foundRange : Bool = false; + for(this_header in req.headers.original.vals()){ + + if(this_header.0 == "range" or this_header.0 == "Range"){ + b_foundRange := true; + split := Iter.toArray(Text.tokens(this_header.1, #char('='))); + split2 := Iter.toArray(Text.tokens(split[1],#char('-'))); + if(split2.size() == 1){ + start := Conversion.textToNat(split2[0]); + } else { + start := Conversion.textToNat(split2[0]); + end := Conversion.textToNat(split2[1]); + }; + debug if(debug_channel.library) D.print("split2 " # debug_show(split2)); + }; + }; + + + if(b_foundRange == true){ + //range request + debug if(debug_channel.library) D.print("dealing with a range request"); + let result = handle_stream_content( + state, + token_id, + library_id, + start, + end, + content_type, + zone, + req + ); + debug if(debug_channel.library)D.print("returning with callback:"); + debug if(debug_channel.library)D.print(debug_show(Option.isSome(result.streaming_strategy))); + return result; + + } else { + debug if(debug_channel.library)D.print("Not a range requst"); + + /* + remove this comment to get a dump of the actual headers that made it through. 
+ return { + status_code = 200; + headers = [("Content-Type", "text/plain")]; + body = Conversion.valueToBlob(#Text(debug_show(req.headers.original) # "|||" # debug_show(req.original.headers))); + streaming_strategy = null; + }; */ + //standard content request + if(zone.size() > 1){ + //streaming required + let result = handleLargeContent( + state, + "nft/" # token_id # "|" # library_id, + content_type, + zone, + req + ); + debug if(debug_channel.library)D.print("returning with callback"); + debug if(debug_channel.library)D.print(debug_show(Option.isSome(result.streaming_strategy))); + return result; + } else { + //only one chunck + return { + status_code = 200; + headers = [("Content-Type", content_type)]; + body = Conversion.valueUnstableToBlob(zone.get(0)); + streaming_strategy = null; + }; + }; + }; + + }; + }; + } else if(location_type == "collection"){ + //on this canister but with collection id + debug if(debug_channel.library)D.print("collection"); + + let use_token_id = ""; + + + let content_type = switch(Metadata.get_nft_text_property(library_meta, "content_type")){ + case(#err(err)){return _not_found("content type not found");}; + case(#ok(val)){val}; + }; + + debug if(debug_channel.library)D.print("collection content type is " # content_type); + + let item = switch(Metadata.get_library_item_from_store(state.nft_library, use_token_id, library_id)){ + case(#err(err)){return _not_found("item not found")}; + case(#ok(val)){val}; + }; + + switch(item.getOpt(1)){ + case(null){ + //nofiledata + return _not_found("file data not found"); + }; + case(?zone){ + debug if(debug_channel.library) D.print("size of zone"); + debug if(debug_channel.library) D.print(debug_show(zone.size())); + + var split : [Text] = []; + var split2 : [Text] = []; + var start : ?Nat = null; + var end : ?Nat = null; + var b_foundRange : Bool = false; + + + + for(this_header in req.headers.original.vals()){ + + if(this_header.0 == "range" or this_header.0 == "Range"){ + b_foundRange := true; + split := Iter.toArray(Text.tokens(this_header.1, #char('='))); + split2 := Iter.toArray(Text.tokens(split[1],#char('-'))); + if(split2.size() == 1){ + start := Conversion.textToNat(split2[0]); + } else { + start := Conversion.textToNat(split2[0]); + end := Conversion.textToNat(split2[1]); + }; + }; + }; + + + if(b_foundRange == true){ + //range request + debug if(debug_channel.library) D.print("dealing with a range request"); + let result = handle_stream_content( + state, + use_token_id, + library_id, + start, + end, + content_type, + zone, + req + ); + debug if(debug_channel.library) D.print("returning with callback:"); + debug if(debug_channel.library) D.print(debug_show(Option.isSome(result.streaming_strategy))); + return result; + + } else { + debug if(debug_channel.library) D.print("Not a range requst"); + + /* + remove this comment to get a dump of the actual headers that made it through. 
+ return { + status_code = 200; + headers = [("Content-Type", "text/plain")]; + body = Conversion.valueToBlob(#Text(debug_show(req.headers.original) # "|||" # debug_show(req.original.headers))); + streaming_strategy = null; + }; */ + //standard content request + if(zone.size() > 1){ + //streaming required + let result = handleLargeContent( + state, + "nft/" # use_token_id # "|" # library_id, + content_type, + zone, + req + ); + debug if(debug_channel.library) D.print("returning with callback"); + debug if(debug_channel.library) D.print(debug_show(Option.isSome(result.streaming_strategy))); + return result; + } else { + //only one chunck + return { + status_code = 200; + headers = [("Content-Type", content_type)]; + body = Conversion.valueUnstableToBlob(zone.get(0)); + streaming_strategy = null; + }; + }; + }; + + }; + }; + + + + } else { + //redirect to asset + let location = switch(Metadata.get_nft_text_property(library_meta, "location")){ + case(#err(err)){return _not_found("location not found");}; + case(#ok(val)){val}; + }; + debug if(debug_channel.library) D.print("redirecting to asset" # location); + return { + body = ""; + headers = [("Location", location)]; + status_code = 307; + streaming_strategy = null; + }; + }; + }; + + public func renderSmartRoute( + state : Types.StorageState, + req : httpparser.ParsedHttpRequest, + metadata : CandyTypes.CandyValue, + token_id: Text, smartRoute: Text) : HTTPResponse { + //D.print("path is ex"); + let library_id = switch(Metadata.get_nft_text_property(metadata, smartRoute)){ + case(#err(err)){return _not_found("library not found");}; + case(#ok(val)){val}; + }; + //D.print(library_id); + + return renderLibrary(state, req, metadata, token_id, library_id); + }; + + //standard response for a 404 + private func _not_found(message: Text) : HTTPResponse{ + return{ + body = Text.encodeUtf8("404 Not found :" # message); + headers : [http.HeaderField] = []; + status_code : Nat16= 404; + streaming_strategy : ?StreamingStrategy = null; + }; + }; + + public func nftStreamingCallback( + tk : StreamingCallbackToken, + state: Types.StorageState) : StreamingCallbackResponse { + debug if(debug_channel.streaming) D.print("in streaming callback"); + let path = Iter.toArray(Text.tokens(tk.key, #text("/"))); + debug if(debug_channel.streaming) D.print(debug_show(path)); + if (path.size() == 2 and path[0] == "nft") { + debug if(debug_channel.streaming) D.print("private nft"); + let path2 = Iter.toArray(Text.tokens(path[1], #text("|"))); + + let (token_id, library_id) = if(path2.size() == 1){ + ("", path2[0]); + } else { + ( path2[0], path2[1]); + }; + debug if(debug_channel.streaming) D.print(debug_show(path2)); + + let item = switch(Metadata.get_library_item_from_store(state.nft_library, token_id, library_id)){ + case(#err(err)){ + debug if(debug_channel.streaming) D.print("an error" # debug_show(err)); + return { + body = Blob.fromArray([]); + token = null; + }}; + case(#ok(val)){val}; + }; + + switch(item.getOpt(1)){ + case(null){ + //nofiledata + return { + body = Blob.fromArray([]); + token = null; + }; + }; + case(?zone){ + return stream_content( + tk.key, + tk.index, + zone, + ); + }; + }; + + + } else if(path.size() == 2 and path[0] == "nft-m"){ + //have to get data differently + debug if(debug_channel.streaming) D.print("in media pathway"); + let path2 = Iter.toArray(Text.tokens(path[1], #text("|"))); + //todo: handle private nft + let (token_id, library_id, rStartText, rEndText, sizeText) = if(path2.size() == 1){ + ("", path2[0], path2[1], path2[2], 
path2[3]); + } else { + ( path2[0], path2[1], path2[2], path2[3], path2[4]); + }; + debug if(debug_channel.streaming) D.print(debug_show(path2)); + let item = switch(Metadata.get_library_item_from_store(state.nft_library, token_id, library_id)){ + case(#err(err)){ + debug if(debug_channel.streaming) D.print("no item"); + return { + body = Blob.fromArray([]); + token = null; + }}; + case(#ok(val)){val}; + }; + switch(item.getOpt(1)){ + case(null){ + //nofiledata + debug if(debug_channel.streaming) D.print("no file bytes found"); + return { + body = Blob.fromArray([]); + token = null; + }; + }; + case(?zone){ + debug if(debug_channel.streaming) D.print("about to call stream media from the callback pathway"); + let rStart = Option.get(Conversion.textToNat(rStartText),0); + let rEnd = Option.get(Conversion.textToNat(rEndText),0); + let size = Option.get(Conversion.textToNat(sizeText),0); + debug if(debug_channel.streaming) D.print(debug_show(rStart, rEnd, size)); + return stream_media( + token_id, + library_id, + tk.index, + zone, + rStart, + rEnd, + size + ); + }; + }; + + }; + { + body = Blob.fromArray([]); + token = null; + }; + }; + + private func stream_content( + key : Text, + index : Nat, + data : CandyTypes.DataZone, + ) : StreamingCallbackResponse { + let result = _stream_content( + key, + index, + data, + ); + + D.print("the stream content " # key); + D.print(debug_show(result)); + { + body = result.payload; + token = result.callback; + }; + }; + + public func http_request_streaming_callback( + tk : StreamingCallbackToken, + state : Types.StorageState) : StreamingCallbackResponse { + + debug if(debug_channel.large_content) D.print("in the request_streamint callbak"); + debug if(debug_channel.large_content) D.print(debug_show(tk)); + if (Text.startsWith(tk.key, #text("nft/"))) { + let path = Iter.toArray(Text.tokens(tk.key, #text("/"))); + + let path2 = Iter.toArray(Text.tokens(path[1], #text("|"))); + + + //nyi: handle private nft + debug if(debug_channel.large_content) D.print(debug_show(path)); + debug if(debug_channel.large_content) D.print(debug_show(path2)); + + let (token_id, library_id) = if(path2.size() == 1){ + ("", path2[0]); + } else { + ( path2[0], path2[1]); + }; + + let item = switch(Metadata.get_library_item_from_store(state.nft_library, token_id, library_id)){ + case(#err(err)){return { + body = Blob.fromArray([]); + token = null; + }; + }; + case(#ok(val)){val}; + }; + + //D.print("have item"); + + switch (item.getOpt(1)) { + case (null) { }; + case (?zone) { + return stream_content( + tk.key, + tk.index, + zone, + ); + }; + }; + } else if (Text.startsWith(tk.key, #text("nft-m/"))){ + let path = Iter.toArray(Text.tokens(tk.key, #text("/"))); + + let path2 = Iter.toArray(Text.tokens(path[1], #text("|"))); + //nyi: handle private nft + debug if(debug_channel.large_content) D.print(debug_show(path)); + debug if(debug_channel.large_content) D.print(debug_show(path2)); + + let (token_id, library_id, rStartText, rEndText, sizeText) = if(path2.size() == 1){ + ("", path2[0], path2[1], path2[2], path2[3]); + } else { + ( path2[0], path2[1], path2[2], path2[3], path2[4]); + }; + + let item = switch(Metadata.get_library_item_from_store(state.nft_library, token_id, library_id)){ + case(#err(err)){return { + body = Blob.fromArray([]); + token = null; + }; + }; + case(#ok(val)){val}; + }; + + debug if(debug_channel.large_content) //D.print("have item"); + + switch (item.getOpt(1)) { + case (null) { }; + case (?zone) { + return stream_media( + token_id, + library_id, + + tk.index, 
+ zone, + Option.get(Conversion.textToNat(rStartText),0),//rstart + + Option.get(Conversion.textToNat(rEndText),0),//rend + Option.get(Conversion.textToNat(sizeText),0),//size + ); + }; + }; + + } else { + //handle static assests if we have them + }; + return { + body = Blob.fromArray([]); + token = null; + }; + }; + + //pulls + private func json(message: CandyTypes.CandyValue, _query: ?Text) : HTTPResponse { + let message_response = switch(_query) { + case(null) { + message + }; + case(?q) { + switch(splitQuery(Text.replace(q, #text("--"), "~"), '~')) { + case(#ok(qs)) { + switch(get_deep_properties(message, qs)) { + case(#ok(data)) { + data; + }; + case(#back){ + message; + }; + case(#err(err)) { + return _not_found("properties not found: " # q); + }; + + }; + }; + case(#err(err)) { + return _not_found(err); + }; + /* case(_){ + return _not_found("unexpected value: " # debug_show(message)); + }; */ + }; + }; + }; + + return { + body = Text.encodeUtf8(value_to_json(message_response)); + headers = [(("Content-Type", "application/json")),(("Access-Control-Allow-Origin", "*"))]; + status_code = 200; + streaming_strategy = null; + }; + }; + + type sQuery = { #standard: Text; #multi: Text }; + //handles queries + public func splitQuery(q: Text, p: Char): Result.Result, Text> { + var queries = List.nil(); + var key : Text = ""; + var multi : Bool = false; + var open : Bool = false; + + let addQueries = func(key: Text, current: List.List, multi: Bool): Result.Result, Text> { + //D.print(debug_show(multi, key)); + if(multi) { + if(Text.contains(key, #char(p))) { + return #err("multi: not supported split") + }; + #ok(List.push(#multi(key), current)); + } else { + if(Text.contains(key, #char(','))) { + return #err("Standard: not supported [,]"); + }; + #ok(List.push(#standard(key), current)); + }; + }; + + for(thisChar in Text.toIter(q)) { + if(thisChar == '[') { + open := true; + multi := true; + } else if(thisChar == ']') { + open := false; + } else { + if(thisChar == p and open == false) { + switch(addQueries(key, queries, multi)) { + case(#ok(res)){queries:=res;}; + case(err){return err;}; + }; + multi := false; + key := ""; + } else { + key:= key # Char.toText(thisChar); + }; + }; + }; + + switch(addQueries(key, queries, multi)) { + case(#ok(res)){queries:=res;}; + case(err){return err;}; + }; + return #ok(List.reverse(queries)); + }; + + //gets prroperties from deep in a structure + public func get_deep_properties(metadata: CandyTypes.CandyValue, qs: List.List): {#ok: CandyTypes.CandyValue; #err: (); #back: ()} { + if(List.isNil(qs)) { + return #back(); + }; + + let item = List.pop(qs); + + let key = switch(item.0){ + case(null){return #err;}; + case(?val){val;}; + }; + let listQs = item.1; + + switch(metadata) { + case(#Class(properties)) { + switch(key) { + case(#standard(standard)) { + switch(Properties.getClassProperty(metadata, standard)){ + case(null) { + return #err(); + }; + case(?val){ + switch(get_deep_properties(val.value, listQs)) { + case(#ok(res)){#ok(res);}; + case(#back()){#ok(#Class([val]));}; + case(err){err;}; + }; + }; + }; + }; + case(#multi(multi)) { + if(List.isNil(listQs)) { + let props = Array.map( + split_text(multi, ','), + func (key: Text): CandyTypes.Query { + return { + name = key; + next = []; + }; + } + ); + return switch(Properties.getProperties(properties, props)) { + case(#ok(val)){#ok(#Class(val));}; + case(#err(err)){#err()}; + }; + } else { + return #err(); + }; + }; + }; + }; + case(#Array(_)) { + switch(key) { + case(#standard(standard)) { + var len = 
0; + for(this_item in Conversion.valueToValueArray(metadata).vals()) { + if(Nat.toText(len) == standard) { + switch(get_deep_properties(this_item, listQs)) { + case(#ok(res)){return #ok(res);}; + case(#back()){return #ok(this_item);}; + case(err){return err;}; + }; + }; + len := len + 1; + }; + }; + case(#multi(multi)) { + var splitMulti: [Text] = split_text(multi, ','); + let list: Buffer.Buffer = Buffer.Buffer(1); + var len = 0; + for(this_item in Conversion.valueToValueArray(metadata).vals()) { + switch(Array.find(splitMulti, func (key: Text) { + return key == Nat.toText(len); + })) { + case(null) {}; + case(?find) { + switch(get_deep_properties(this_item, listQs)) { + case(#ok(res)){ + list.add(res); + }; + case(#back()){ + list.add(this_item); + }; + case(err){return err;}; + }; + }; + }; + len := len + 1; + }; + + if(list.size() == splitMulti.size()) { + return #ok(#Array(#thawed(list.toArray()))); + } else { + return #err(); + }; + }; + }; + + return #err(); + }; + case(_) { + if(List.isNil(qs)) { + return #back(); + }; + + return #err(); + }; + }; + }; + + //converst a candu value to JSON + public func value_to_json(val: CandyTypes.CandyValue): Text { + switch(val){ + //nat + case(#Nat(val)){ Nat.toText(val)}; + //text + case(#Text(val)){ "\"" # val # "\""; }; + //class + case(#Class(val)){ + var body: Buffer.Buffer = Buffer.Buffer(1); + for(this_item in val.vals()){ + body.add("\"" # this_item.name # "\"" # ":" # value_to_json(this_item.value)); + }; + + return "{" # Text.join(",", body.vals()) # "}"; + }; + //array + case(#Array(val)){ + switch(val){ + case(#frozen(val)){ + var body: Buffer.Buffer = Buffer.Buffer(1); + for(this_item in val.vals()){ + body.add(value_to_json(this_item)); + }; + + return "[" # Text.join(",", body.vals()) # "]"; + }; + case(#thawed(val)){ + var body: Buffer.Buffer = Buffer.Buffer(1); + for(this_item in val.vals()){ + body.add(value_to_json(this_item)); + }; + + return "[" # Text.join(",", body.vals()) # "]"; + }; + }; + }; + //bytes + case(#Bytes(val)){ + switch(val){ + case(#frozen(val)){ + return "\"" # "CandyHex.encode" # "\"";//CandyHex.encode(val); + }; + case(#thawed(val)){ + return "\"" # "CandyHex.encode" # "\"";//CandyHex.encode(val); + }; + }; + }; + //bytes + case(#Blob(val)){ + + return "\"" # "CandyHex.encode" # "\"";//CandyHex.encode(val); + + }; + //principal + case(#Principal(val)){ "\"" # Principal.toText(val) # "\"";}; + case(_){"";}; + }; + }; + + public func split_text(q: Text, p: Char): [Text] { + var queries: Buffer.Buffer = Buffer.Buffer(1); + var key : Text = ""; + + for(thisChar in Text.toIter(q)) { + if(thisChar != '[' and thisChar != ']') { + if(thisChar == p) { + queries.add(key); + key := ""; + } else { + key:= key # Char.toText(thisChar); + }; + }; + }; + queries.add(key); + return queries.toArray(); + }; + + //checks that a access token holder is the collection owner + public func http_owner_check(stateBody : Types.StorageState, req : httpparser.ParsedHttpRequest): Result.Result<(), Text> { + switch(req.url.queryObj.get("access")) { + case(null) { + return #err("no access code in request when nft not minted"); + }; + case(?access_token) { + switch(stateBody.tokens.get(access_token)) { + case(null) { + return #err("identity not found by access_token : " # access_token); + }; + case(?info) { + let { identity; expires; } = info; + + if(stateBody.state.collection_data.owner != identity) { + return #err("not an owner"); + }; + + if(expires < Time.now()) { + return #err("access expired"); + }; + }; + }; + }; + }; + + #ok(); + 
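+    // Access-key flow, summarized: the request must carry "?access=<key>", and <key> must map in
+    // stateBody.tokens to an HttpAccess record { identity; expires }. The checks above reject the
+    // request when no key is supplied, the key is unknown, the identity is not the collection
+    // owner, or the key has expired (expires < Time.now()). Note that gen_access_key above only
+    // produces the random key text; associating that key with an identity happens outside this
+    // function.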
}; + + + //checks that a access token holder is an owner of an NFT + //**NOTE: NOTE: Data stored on the IC should not be considered secure. It is possible(though not probable) that node operators could look at the data at rest and see access tokens. The only current method for hiding data from node providers is to encrypt the data before putting it into a canister. It is highly recommended that any personally identifiable information is encrypted before being stored on a canister with a separate and secure decryption system in place.** + public func http_nft_owner_check(stateBody : Types.StorageState, req : httpparser.ParsedHttpRequest, metadata: CandyTypes.CandyValue): Result.Result<(), Text> { + switch(req.url.queryObj.get("access")) { + case(null) { + return #err("no access code in request when nft not minted"); + }; + case(?access_token) { + return #err("access token not yet supported for multi-canister colletions"); + }; + }; + + #ok(); + }; + + //handles http requests + public func http_request( + state : Types.StorageState, + rawReq: HttpRequest, + caller : Principal): (HTTPResponse) { + + debug if(debug_channel.request) D.print("a page was requested"); + + let req = httpparser.parse(rawReq); + let {host; port; protocol; path; queryObj; anchor; original = url} = req.url; + + + let path_size = req.url.path.array.size(); + let path_array = req.url.path.array; + + + debug if(debug_channel.request) D.print(debug_show(rawReq)); + + if(path_size == 0) { + return { + body = Text.encodeUtf8 (" An Origyn NFT Canister \n"); + headers = []; + status_code = 200; + streaming_strategy = null; + }; + }; + + + if(path_size > 0){ + if(path_array[0] == "-"){ + if(path_size > 1){ + debug if(debug_channel.request) D.print("on path print area"); + debug if(debug_channel.request) D.print(debug_show(path_size)); + let token_id = path_array[1]; + + let metadata = switch(Map.get(state.state.nft_metadata, Map.thash, token_id)){ + case(null){ + return _not_found("metadata not found"); + }; + case(?val){ + val; + }; + }; + let is_minted = Metadata.is_minted(metadata); + if(path_size == 2){ + //show the main asset + debug if(debug_channel.request) D.print("should be showing the main asset unless unmited" # debug_show(is_minted)); + if(is_minted == false){ + return renderSmartRoute(state, req, metadata, token_id, Types.metadata.hidden_asset); + }; + return renderSmartRoute(state, req, metadata, token_id, Types.metadata.primary_asset); + }; + if(path_size == 3){ + if(path_array[2] == "ex"){ + let aResponse = renderSmartRoute(state ,req, metadata, token_id, Types.metadata.experience_asset); + if(is_minted == false and aResponse.status_code==404){ + return renderSmartRoute(state ,req, metadata, token_id, Types.metadata.hidden_asset); + }; + return aResponse; + }; + if(path_array[2] == "preview"){ + if(is_minted == false){ + return renderSmartRoute(state,req, metadata, token_id, Types.metadata.hidden_asset); + }; + return renderSmartRoute(state,req, metadata, token_id, Types.metadata.preview_asset); + }; + if(path_array[2] == "hidden"){ + return renderSmartRoute(state,req, metadata, token_id, Types.metadata.hidden_asset); + }; + if(path_array[2] == "primary"){ + if(is_minted == false){ + return renderSmartRoute(state,req, metadata, token_id, Types.metadata.hidden_asset); + }; + return renderSmartRoute(state,req, metadata, token_id, Types.metadata.primary_asset); + }; + if(path_array[2] == "info"){ + return json(Metadata.get_clean_metadata(metadata, caller), queryObj.get("query")); + }; + if(path_array[2] == 
"library"){ + let libraries = switch(Metadata.get_nft_library(Metadata.get_clean_metadata(metadata, caller), ?caller)){ + case(#err(err)){return _not_found("libraries not found");}; + case(#ok(val)){ val }; + }; + return json(libraries, null); + }; + }; + if(path_size > 3){ + if(path_array[2] == "-") { + let library_id = path_array[3]; + if(path_size == 4){ + if (is_minted == false) { + switch(http_owner_check(state, req)) { + case(#err(err)) { + return _not_found(err); + }; + case(#ok()) {}; + }; + }; + + return renderLibrary(state, req, metadata, token_id, library_id); + }; + if(path_size == 5){ + if(path_array[4] == "info"){ + let library_meta = switch(Metadata.get_library_meta(metadata, library_id)){ + case(#err(err)){return _not_found("library by " # library_id # " not found");}; + case(#ok(val)){val}; + }; + return json(library_meta, queryObj.get("query")); + }; + }; + }; + }; + }; + } else if(path_array[0] == "collection"){ + debug if(debug_channel.request) D.print("found collection"); + + + debug if(debug_channel.request) D.print("on path print area"); + debug if(debug_channel.request) D.print(debug_show(path_size)); + let token_id = ""; + + let metadata = switch(Map.get(state.state.nft_metadata, Map.thash,token_id)){ + case(null){ + return _not_found("metadata not found"); + }; + case(?val){ + val; + }; + }; + if(path_size > 1){ + if(path_array[1] == "-"){ + + debug if(debug_channel.request) D.print("found -"); + + if(path_size == 2){ + // https://exos.surf/-/canister_id/collection/ + debug if(debug_channel.request) D.print("render smart route 2 collection" # token_id); + + return renderSmartRoute(state, req, metadata, token_id, Types.metadata.primary_asset); + }; + if(path_size > 2){ + + let library_id = path_array[2]; + if(path_size == 3){ + debug if(debug_channel.request) D.print("render library " # token_id # " " # library_id); + // https://exos.surf/-/canister_id/collection/-/library_id + return renderLibrary(state, req, metadata, token_id, library_id); + }; + if(path_size == 4){ + if(path_array[4] == "info"){ + /// https://exos.surf/-/canister_id/collection/-/library_id/info + debug if(debug_channel.request) D.print("render info " # token_id # " " # library_id); + + let library_meta = switch(Metadata.get_library_meta(metadata, library_id)){ + case(#err(err)){return _not_found("library by " # library_id # " not found");}; + case(#ok(val)){val}; + }; + return json(library_meta, queryObj.get("query")); + }; + }; + + }; + }; + if(path_array[1] == "ex"){ + debug if(debug_channel.request) D.print("render ex " # token_id ); + let aResponse = renderSmartRoute(state ,req, metadata, token_id, Types.metadata.experience_asset); + if(aResponse.status_code==404){ + return renderSmartRoute(state ,req, metadata, token_id, Types.metadata.hidden_asset); + }; + return aResponse; + }; + if(path_array[1] == "preview"){ + debug if(debug_channel.request) D.print("render perview " # token_id ); + return renderSmartRoute(state,req, metadata, token_id, Types.metadata.preview_asset); + }; + if(path_array[1] == "hidden"){ + debug if(debug_channel.request) D.print("render hidden " # token_id ); + return renderSmartRoute(state,req, metadata, token_id, Types.metadata.hidden_asset); + }; + if(path_array[1] == "primary"){ + debug if(debug_channel.request) D.print("render primary " # token_id ); + return renderSmartRoute(state,req, metadata, token_id, Types.metadata.primary_asset); + }; + if(path_array[1] == "info"){ + debug if(debug_channel.request) D.print("render info " # token_id ); + return 
json(Metadata.get_clean_metadata(metadata, caller), queryObj.get("query")); + }; + if(path_array[1] == "library"){ + debug if(debug_channel.request) D.print("render library " # token_id ); + let libraries = switch(Metadata.get_nft_library(Metadata.get_clean_metadata(metadata, caller), ?caller)){ + case(#err(err)){return _not_found("libraries not found");}; + case(#ok(val)){ val }; + }; + return json(libraries, null); + }; + }; + } else if(path_array[0] == "metrics"){ + return { + body = Text.encodeUtf8("Metrics page :"); + headers = []; + status_code = 200; + streaming_strategy = null; + }; + }; + }; + + return _not_found("nyi"); + }; + + +} diff --git a/src/origyn_nft_reference/storage_store.mo b/src/origyn_nft_reference/storage_store.mo new file mode 100644 index 0000000..1888f4b --- /dev/null +++ b/src/origyn_nft_reference/storage_store.mo @@ -0,0 +1,184 @@ +import Types "types"; +import Result "mo:base/Result"; +import CandyTypes "mo:candy_0_1_10/types"; +import Iter "mo:base/Iter"; +import Blob "mo:base/Blob"; +import Option "mo:base/Option"; +import Principal "mo:base/Principal"; +import Text "mo:base/Text"; +import Properties "mo:candy_0_1_10/properties"; +import Workspace "mo:candy_0_1_10/workspace"; +import Metadata "metadata"; +import TrieMap "mo:base/TrieMap"; +import Conversions "mo:candy_0_1_10/conversion"; +import D "mo:base/Debug"; +import Buffer "mo:base/Buffer"; +import Time "mo:base/Time"; +import NFTUtils "utils"; +import Map "mo:map_6_0_0/Map"; +import SB "mo:stablebuffer_0_2_0/StableBuffer"; +import MigrationTypes "./migrations/types"; + +module { + + let debug_channel = { + stage = false; + }; + + + public func stage_library_nft_origyn( + state : Types.StorageState, + chunk : Types.StageChunkArg, + source_allocation : Types.AllocationRecordStable, + metadata: CandyTypes.CandyValue, //do we need metadata here? 
probably for http request...surely for file data + caller : Principal) : async Result.Result { + + if(state.state.collection_data.owner != caller){return #err(Types.errors(#unauthorized_access, "stage_library_nft_origyn - storage - not the gateway", ?caller))}; + + + debug if(debug_channel.stage) D.print("in the remote canister"); + //make sure we have an allocation for space for this chunk + let allocation = switch(Map.get<(Text, Text), Types.AllocationRecord>(state.state.allocations,( NFTUtils.library_hash, NFTUtils.library_equal), (chunk.token_id, chunk.library_id))){ + case(null){ + debug if(debug_channel.stage) D.print("no allocation yet, so lets add it"); + + + let allocation = { + canister = source_allocation.canister; + allocated_space = source_allocation.allocated_space; + var available_space = source_allocation.available_space; + var chunks = SB.fromArray(source_allocation.chunks); + token_id = source_allocation.token_id; + library_id = source_allocation.library_id; + }; + debug if(debug_channel.stage) D.print("this is the allocation to be added " # debug_show(allocation)); + Map.set<(Text,Text),Types.AllocationRecord>(state.state.allocations,( NFTUtils.library_hash, NFTUtils.library_equal), (chunk.token_id, chunk.library_id), + allocation + ); + //this is where we remove the space for the whole allocation + state.state.canister_availible_space -= source_allocation.allocated_space; + allocation; + + }; + case(?val)(val); + }; + + if(chunk.chunk == 0){ + //the first chunk comes with the metadata + Map.set(state.state.nft_metadata, Map.thash, chunk.token_id, metadata); + }; + + debug if(debug_channel.stage) D.print("looking for workspace"); + var found_workspace : CandyTypes.Workspace = + switch(state.nft_library.get(chunk.token_id)){ + case(null){ + //chunk doesn't exist; + debug if(debug_channel.stage) D.print("does not exist"); + let new_workspace = Workspace.initWorkspace(2); + debug if(debug_channel.stage) D.print("puting Zone"); + debug if(debug_channel.stage) D.print(debug_show(chunk.filedata)); + + + + new_workspace.add(Workspace.initDataZone(CandyTypes.destabalizeValue(chunk.filedata))); + + debug if(debug_channel.stage) D.print("put the zone"); + var new_library = TrieMap.TrieMap(Text.equal, Text.hash); + debug if(debug_channel.stage) D.print("putting workspace"); + new_library.put(chunk.library_id, new_workspace); + debug if(debug_channel.stage) D.print("putting library"); + state.nft_library.put(chunk.token_id, new_library); + new_workspace; + }; + case(?library){ + switch(library.get(chunk.library_id)){ + case(null){ + debug if(debug_channel.stage) D.print("nft exists but not file"); + //nft exists but this file librry entry doesnt exist + //nftdoesn't exist; + let new_workspace = Workspace.initWorkspace(2); + + new_workspace.add(Workspace.initDataZone(CandyTypes.destabalizeValue(chunk.filedata))); + + + library.put(chunk.library_id, new_workspace); + + + new_workspace; + }; + case(?workspace){ + //D.print("found workspace"); + workspace; + }; + }; + + }; + }; + + //file the chunk + //D.print("filing the chunk"); + let file_chunks = switch(found_workspace.getOpt(1)){ + case(null){ + if(found_workspace.size()==0){ + //todo: should be an error because no filedata + found_workspace.add(Workspace.initDataZone(#Empty)); + }; + if(found_workspace.size()==1){ + found_workspace.add(Buffer.Buffer(0)); + }; + found_workspace.get(1); + }; + case(?dz){ + dz; + }; + }; + + + if(chunk.chunk + 1 <= SB.size(allocation.chunks)){ + //this chunk already exists in the allocatioin + //see 
what size it is + let current_size = SB.get(allocation.chunks,chunk.chunk); + if(chunk.content.size() > current_size){ + //allocate more space + SB.put(allocation.chunks, chunk.chunk, chunk.content.size()); + allocation.available_space += (chunk.content.size() - current_size); + } else if (chunk.content.size() != current_size){ + //give space back + SB.put(allocation.chunks, chunk.chunk, chunk.content.size()); + allocation.available_space -= (current_size - chunk.content.size()); + } else {}; + } else { + for(this_index in Iter.range(SB.size(allocation.chunks), chunk.chunk)){ + if(this_index == chunk.chunk){ + SB.add(allocation.chunks, chunk.content.size()); + allocation.available_space -= chunk.content.size(); + } else { + SB.add(allocation.chunks, 0); + } + }; + }; + + //D.print("putting the chunk"); + if(chunk.chunk + 1 <= file_chunks.size()){ + file_chunks.put(chunk.chunk, #Blob(chunk.content)); + } else { + debug if(debug_channel.stage) D.print("in putting the chunk iter"); + debug if(debug_channel.stage) D.print(debug_show(chunk.chunk)); + debug if(debug_channel.stage) D.print(debug_show(file_chunks.size())); + + for(this_index in Iter.range(file_chunks.size(),chunk.chunk)){ + if(this_index == chunk.chunk){ + file_chunks.add(#Blob(chunk.content)); + } else { + file_chunks.add(#Blob(Blob.fromArray([]))); + } + }; + + }; + + //D.print("returning"); + return #ok({canister = state.canister()}); + }; + + +} \ No newline at end of file diff --git a/src/origyn_nft_reference/types.mo b/src/origyn_nft_reference/types.mo new file mode 100644 index 0000000..9b25003 --- /dev/null +++ b/src/origyn_nft_reference/types.mo @@ -0,0 +1,1397 @@ + +import AccountIdentifier "mo:principalmo/AccountIdentifier"; +import Blob "mo:base/Blob"; +import Buffer "mo:base/Buffer"; +import Candy "mo:candy_0_1_10/types"; +import CandyTypes "mo:candy_0_1_10/types"; +import Conversions "mo:candy_0_1_10/conversion"; +import D "mo:base/Debug"; +import DIP721 "DIP721"; +import EXT "mo:ext/Core"; +import Nat32 "mo:base/Nat32"; +import EXTCommon "mo:ext/Common"; +import Iter "mo:base/Iter"; +import Map "mo:map_6_0_0/Map"; +import MigrationTypes "./migrations/types"; +import NFTUtils "mo:map_6_0_0/utils"; +import Order "mo:base/Order"; +import Principal "mo:base/Principal"; +import Result "mo:base/Result"; +import SB "mo:stablebuffer_0_2_0/StableBuffer"; +import StorageMigrationTypes "./migrations_storage/types"; +import Text "mo:base/Text"; +import Time "mo:base/Time"; +import hex "mo:encoding/Hex"; +import TrieMap "mo:base/TrieMap"; + +module { + + public type InitArgs = { + owner: Principal.Principal; + storage_space: ?Nat; + }; + + public type StorageInitArgs = { + gateway_canister: Principal; + network: ?Principal; + storage_space: ?Nat; + }; + + public type StorageMigrationArgs = { + gateway_canister: Principal; + network: ?Principal; + storage_space: ?Nat; + caller: Principal; + }; + + public type ManageCollectionCommand = { + #UpdateManagers : [Principal]; + #UpdateOwner : Principal; + #UpdateNetwork : ?Principal; + #UpdateLogo : ?Text; + #UpdateName : ?Text; + #UpdateSymbol : ?Text; + #UpdateMetadata: (Text, ?CandyTypes.CandyValue, Bool); + }; + + // RawData type is a tuple of Timestamp, Data, and Principal + public type RawData = (Int, Blob, Principal); + + public type HttpRequest = { + body: Blob; + headers: [HeaderField]; + method: Text; + url: Text; + }; + + public type StreamingCallbackToken = { + content_encoding: Text; + index: Nat; + key: Text; + //sha256: ?Blob; + }; + public type 
StreamingCallbackHttpResponse = { + body: Blob; + token: ?StreamingCallbackToken; + }; + public type ChunkId = Nat; + public type SetAssetContentArguments = { + chunk_ids: [ChunkId]; + content_encoding: Text; + key: Key; + sha256: ?Blob; + }; + public type Path = Text; + public type Key = Text; + + public type HttpResponse = { + body: Blob; + headers: [HeaderField]; + status_code: Nat16; + streaming_strategy: ?StreamingStrategy; + }; + + public type StreamingStrategy = { + #Callback: { + callback: shared () -> async (); + token: StreamingCallbackToken; + }; + }; + + public type HeaderField = (Text, Text); + + public type canister_id = Principal; + + public type definite_canister_settings = { + freezing_threshold : Nat; + controllers : ?[Principal]; + memory_allocation : Nat; + compute_allocation : Nat; + }; + + public type canister_status = { + status : { #stopped; #stopping; #running }; + memory_size : Nat; + cycles : Nat; + settings : definite_canister_settings; + module_hash : ?[Nat8]; + }; + + public type IC = actor { + canister_status : { canister_id : canister_id } -> async canister_status; + }; + + public type StageChunkArg = { + token_id: Text; + library_id: Text; + filedata: CandyTypes.CandyValue;//may need to be nullable + chunk: Nat; //2MB Chunks + content: Blob; + }; + + + public type ChunkRequest = { + token_id: Text; + library_id: Text; + chunk: ?Nat; + }; + + public type ChunkContent = { + #remote : { + canister: Principal; + args: ChunkRequest; + }; + #chunk : { + content: Blob; + total_chunks: Nat; + current_chunk: ?Nat; + storage_allocation: AllocationRecordStable; + }; + }; + + public type MarketTransferRequest = { + token_id: Text; + sales_config: SalesConfig; + }; + + public type OwnerTransferResponse = { + transaction: TransactionRecord; + assets: [CandyTypes.CandyValue]; + }; + + public type ShareWalletRequest = { + token_id: Text; + from: Account; + to: Account; + }; + + public type SalesConfig = { + escrow_receipt : ?EscrowReceipt; + broker_id : ?Principal; + pricing: PricingConfig; + }; + + public type ICTokenSpec = MigrationTypes.Current.ICTokenSpec; + + public type TokenSpec = MigrationTypes.Current.TokenSpec; + + public let TokenSpecDefault = #extensible(#Empty); + + + //nyi: anywhere a deposit address is used, check blob for size in inspect message + public type SubAccountInfo = { + principal : Principal; + account_id : Blob; + account_id_text: Text; + account: { + principal: Principal; + sub_account: Blob; + }; + }; + + public type EscrowReceipt = MigrationTypes.Current.EscrowReceipt; + + public type EscrowRequest = { + token_id : Text; //empty string for general escrow + deposit : DepositDetail; + lock_to_date: ?Int; //timestamp to lock escrow until. 
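+        // Illustrative value (hypothetical): a one-week lock could be written as
+        //   lock_to_date = ?(Time.now() + 7 * 24 * 60 * 60 * 1_000_000_000)
+        // since Time.now() is expressed in nanoseconds on the IC; pass null when no lock date is wanted.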
+ }; + + + + + public type DepositDetail = { + token : TokenSpec; + seller: Account; + buyer : Account; + amount: Nat; //Nat to support cycles; + sale_id: ?Text; + trx_id : ?TransactionID; //null for account based ledgers + }; + + //used to identify the transaction in a remote ledger; usually a nat on the IC + public type TransactionID = MigrationTypes.Current.TransactionID; + + public type EscrowResponse = { + receipt: EscrowReceipt; + balance: Nat; + transaction: TransactionRecord; + }; + + public type BidRequest = { + escrow_receipt: EscrowReceipt; + sale_id: Text; + broker_id: ?Principal; + }; + + public type BidResponse = TransactionRecord; + + public type PricingConfig = MigrationTypes.Current.PricingConfig; + + public type AuctionConfig = MigrationTypes.Current.AuctionConfig; + + + public let AuctionConfigDefault = { + reserve = null; + token = TokenSpecDefault; + buy_now = null; + start_price = 0; + start_date = 0; + ending = #date(0); + min_increase = #amount(0); + }; + + public type NFTInfoStable = { + current_sale : ?SaleStatusStable; + metadata : CandyTypes.CandyValue; + }; + + + + public type AuctionState = MigrationTypes.Current.AuctionState; + + public type AuctionStateStable = { + config: PricingConfig; + current_bid_amount: Nat; + current_broker_id: ?Principal; + end_date: Int; + min_next_bid: Nat; + current_escrow: ?EscrowReceipt; + wait_for_quiet_count: ?Nat; + allow_list: ?[(Principal,Bool)]; // user, tree + participants: [(Principal,Int)]; //user, timestamp of last access + status: { + #open; + #closed; + #not_started; + }; + winner: ?Account; + }; + + public func AuctionState_stabalize_for_xfer(val : AuctionState) : AuctionStateStable{ + { + config = val.config; + current_bid_amount = val.current_bid_amount; + current_broker_id = val.current_broker_id; + end_date = val.end_date; + min_next_bid = val.min_next_bid; + current_escrow = val.current_escrow; + wait_for_quiet_count = val.wait_for_quiet_count; + allow_list = do ? 
{Iter.toArray(Map.entries(val.allow_list!))}; + participants = Iter.toArray(Map.entries(val.participants)); + status = val.status; + winner = val.winner; + }; + }; + + public type SaleStatus = MigrationTypes.Current.SaleStatus; + + public type SaleStatusStable = { + sale_id: Text; //sha256?; + original_broker_id: ?Principal; + broker_id: ?Principal; + token_id: Text; + sale_type: { + #auction: AuctionStateStable; + }; + }; + + + public func SalesStatus_stabalize_for_xfer( item : SaleStatus) : SaleStatusStable { + { + sale_id = item.sale_id; + token_id = item.token_id; + broker_id = item.broker_id; + original_broker_id = item.original_broker_id; + sale_type = switch(item.sale_type){ + case(#auction(val)){ + #auction(AuctionState_stabalize_for_xfer(val)); + } + }; + } + }; + + public type MarketTransferRequestReponse = TransactionRecord; + + public type Account = MigrationTypes.Current.Account; + + public type HttpAccess= { + identity: Principal; + expires: Time.Time; + }; + + public type State = State_v0_1_0; + + public type State_v0_1_0 = { + state : GatewayState_v0_1_0; + canister : () -> Principal; + get_time: () -> Int; + nft_library : TrieMap.TrieMap>; + access_tokens : TrieMap.TrieMap; + refresh_state: () -> State; + }; + + public type GatewayState = GatewayState_v0_1_0; + + public type GatewayState_v0_1_0 = MigrationTypes.Current.State; + + public type StorageState = StorageState_v_0_1_0; + + public type StorageState_v_0_1_0 ={ + + var state : StorageMigrationTypes.Current.State; + canister : () -> Principal; + get_time: () -> Int; + var nft_library : TrieMap.TrieMap>; + tokens : TrieMap.TrieMap; + + refresh_state: () -> StorageState_v_0_1_0; + }; + + public type StorageMetrics = { + allocated_storage: Nat; + available_space: Nat; + allocations: [AllocationRecordStable]; + }; + + + + public type BucketData = { + principal : Principal; + var allocated_space: Nat; + var available_space: Nat; + date_added: Int; + b_gateway: Bool; + var version: (Nat, Nat, Nat); + var allocations: Map.Map<(Text,Text), Int>; // (token_id, library_id), Timestamp + }; + + public type AllocationRecord = { + canister : Principal; + allocated_space: Nat; + var available_space: Nat; + var chunks: SB.StableBuffer; + token_id: Text; + library_id: Text; + }; + + public type AllocationRecordStable = { + canister : Principal; + allocated_space: Nat; + available_space: Nat; + chunks: [Nat]; + token_id: Text; + library_id: Text; + }; + + public func allocation_record_stabalize(item:AllocationRecord) : AllocationRecordStable{ + {canister = item.canister; + allocated_space = item.allocated_space; + available_space = item.available_space; + chunks = SB.toArray(item.chunks); + token_id = item.token_id; + library_id = item. 
library_id;} + }; + + public type TransactionRecord = MigrationTypes.Current.TransactionRecord; + + public type NFTUpdateRequest ={ + #replace:{ + token_id: Text; + data: CandyTypes.CandyValue; + }; + #update:{ + token_id: Text; + app_id: Text; + update: CandyTypes.UpdateRequest; + + } + }; + + public type NFTUpdateResponse = Bool; + + public type EndSaleResponse = TransactionRecord; + + public type EscrowRecord = MigrationTypes.Current.EscrowRecord; + + public type ManageSaleRequest = { + #end_sale : Text; //token_id + #open_sale: Text; //token_id; + #escrow_deposit: EscrowRequest; + #refresh_offers: ?Account; + #bid: BidRequest; + #withdraw: WithdrawRequest; + }; + + public type ManageSaleResponse = { + #end_sale : EndSaleResponse; //trx record if succesful + #open_sale: Bool; //true if opened, false if not; + #escrow_deposit: EscrowResponse; + #refresh_offers: [EscrowRecord]; + #bid: BidResponse; + #withdraw: WithdrawResponse; + }; + + public type SaleInfoRequest = { + #active : ?(Nat, Nat); //get al list of active sales + #history : ?(Nat, Nat); //skip, take + #status : Text; //saleID + #deposit_info : ?Account; + }; + + public type SaleInfoResponse = { + #active: { + records: [(Text, ?SaleStatusStable)]; + eof: Bool; + count: Nat}; + #history : { + records: [?SaleStatusStable]; + eof: Bool; + count : Nat}; + #status: ?SaleStatusStable; + #deposit_info: SubAccountInfo; + }; + + + public type GovernanceRequest = { + #clear_shared_wallets : Text; //token_id of shared wallets to clear + + }; + + public type GovernanceResponse = { + #clear_shared_wallets : Bool; //result + + }; + + + + public type StakeRecord = {amount: Nat; staker: Account; token_id: Text;}; + + public type BalanceResponse = { + multi_canister: ?[Principal]; + nfts: [Text]; + escrow: [EscrowRecord]; + sales: [EscrowRecord]; + stake: [StakeRecord]; + offers: [EscrowRecord]; + }; + + public type LocalStageLibraryResponse = { + #stage_remote : { + allocation :AllocationRecord; + metadata: CandyTypes.CandyValue; + }; + #staged : Principal; + }; + + public type StageLibraryResponse = { + canister: Principal; + }; + + public type WithdrawDescription = { + buyer: Account; + seller: Account; + token_id: Text; + token: TokenSpec; + amount: Nat; + withdraw_to : Account; + }; + + + public type DepositWithdrawDescription = { + buyer: Account; + token: TokenSpec; + amount: Nat; + withdraw_to : Account; + }; + + public type RejectDescription = { + buyer: Account; + seller: Account; + token_id: Text; + token: TokenSpec; + }; + + public type WithdrawRequest = { + #escrow: WithdrawDescription; + #sale: WithdrawDescription; + #reject:RejectDescription; + #deposit: DepositWithdrawDescription; + }; + + + public type WithdrawResponse = TransactionRecord; + + public type CollectionInfo = { + fields: ?[(Text, ?Nat, ?Nat)]; + logo: ?Text; + name: ?Text; + symbol: ?Text; + total_supply: ?Nat; + owner: ?Principal; + managers: ?[Principal]; + network: ?Principal; + token_ids: ?[Text]; + token_ids_count: ?Nat; + multi_canister: ?[Principal]; + multi_canister_count: ?Nat; + metadata: ?CandyTypes.CandyValue; + allocated_storage : ?Nat; + available_space : ?Nat; + }; + + public type CollectionData = { + var logo: ?Text; + var name: ?Text; + var symbol: ?Text; + var metadata: ?CandyTypes.CandyValue; + var owner : Principal; + var managers: [Principal]; + var network: ?Principal; + var allocated_storage: Nat; + var available_space : Nat; + var active_bucket: ?Principal; + }; + + public type CollectionDataForStorage = { + + var owner : Principal; + var 
managers: [Principal]; + var network: ?Principal; + + }; + + public type ManageStorageRequest = { + #add_storage_canisters : [(Principal, Nat, (Nat, Nat, Nat))]; + }; + + public type ManageStorageResponse = { + #add_storage_canisters : (Nat,Nat);//space allocated, space available + }; + + public type LogEntry = { + event : Text; + timestamp: Int; + data: CandyTypes.CandyValue; + caller: ?Principal; + }; + + public type OrigynError = {number : Nat32; text: Text; error: Errors; flag_point: Text;}; + + public type Errors = { + #app_id_not_found; + #asset_mismatch; + #attempt_to_stage_system_data; + #auction_ended; + #auction_not_started; + #bid_too_low; + #cannot_find_status_in_metadata; + #cannot_restage_minted_token; + #content_not_deserializable; + #content_not_found; + #deposit_burned; + #escrow_cannot_be_removed; + #escrow_owner_not_the_owner; + #escrow_withdraw_payment_failed; + #existing_sale_found; + #id_not_found_in_metadata; + #improper_interface; + #item_already_minted; + #item_not_owned; + #library_not_found; + #malformed_metadata; + #no_escrow_found; + #not_enough_storage; + #out_of_range; + #owner_not_found; + #property_not_found; + #receipt_data_mismatch; + #sale_not_found; + #sale_not_over; + #sale_id_does_not_match; + #sales_withdraw_payment_failed; + #storage_configuration_error; + #token_not_found; + #token_id_mismatch; + #token_non_transferable; + #unauthorized_access; + #unreachable; + #update_class_error; + #validate_deposit_failed; + #validate_deposit_wrong_amount; + #validate_deposit_wrong_buyer; + #validate_trx_wrong_host; + #withdraw_too_large; + #nyi; + + }; + + public func errors(the_error : Errors, flag_point: Text, caller: ?Principal) : OrigynError { + switch(the_error){ + case(#id_not_found_in_metadata){ + return { + number = 1; + text = "id was not found in the metadata. id is required."; + error = the_error; + flag_point = flag_point; + caller = caller; + + } + }; + case(#attempt_to_stage_system_data){ + return { + number = 2; + text = "user attempted to set the __system metadata during staging."; + error = the_error; + flag_point = flag_point; + caller = caller;} + }; + case(#cannot_find_status_in_metadata){ + return { + number = 3; + text = "Cannot find __system.status in metadata. 
It was expected to be there.";
+          error = the_error;
+          flag_point = flag_point;
+          caller = caller;}
+      };
+      case(#token_not_found){
+        return {
+          number = 4;
+          text = "Cannot find token.";
+          error = the_error;
+          flag_point = flag_point;
+          caller = caller;}
+      };
+      case(#library_not_found){
+        return {
+          number = 5;
+          text = "Cannot find library.";
+          error = the_error;
+          flag_point = flag_point;
+          caller = caller;}
+      };
+      case(#content_not_found){
+        return {
+          number = 6;
+          text = "Cannot find chunk.";
+          error = the_error;
+          flag_point = flag_point;
+          caller = caller;}
+      };
+      case(#content_not_deserializable){
+        return {
+          number = 7;
+          text = "Cannot deserialize chunk.";
+          error = the_error;
+          flag_point = flag_point;
+          caller = caller;}
+      };
+      case(#cannot_restage_minted_token){
+        return {
+          number = 8;
+          text = "Cannot restage minted token.";
+          error = the_error;
+          flag_point = flag_point;
+          caller = caller;}
+      };
+      case(#owner_not_found){
+        return {
+          number = 9;
+          text = "Cannot find owner.";
+          error = the_error;
+          flag_point = flag_point;
+          caller = caller;}
+      };
+      case(#item_already_minted){
+        return {
+          number = 10;
+          text = "Already minted.";
+          error = the_error;
+          flag_point = flag_point;
+          caller = caller;}
+      };
+      case(#item_not_owned){
+        return {
+          number = 11;
+          text = "Account does not own this item.";
+          error = the_error;
+          flag_point = flag_point;
+          caller = caller;}
+      };
+      case(#app_id_not_found){
+        return {
+          number = 12;
+          text = "App id not found in app node.";
+          error = the_error;
+          flag_point = flag_point;
+          caller = caller;}
+      };
+      case(#existing_sale_found){
+        return {
+          number = 13;
+          text = "A sale for this item is already underway.";
+          error = the_error;
+          flag_point = flag_point;
+          caller = caller;}
+      };
+      case(#out_of_range){
+        return {
+          number = 14;
+          text = "out of range.";
+          error = the_error;
+          flag_point = flag_point;
+          caller = caller;}
+      };
+      case(#property_not_found){
+        return {
+          number = 15;
+          text = "property not found";
+          error = the_error;
+          flag_point = flag_point;
+          caller = caller;}
+      };
+
+      //1000s - Error with underlying system
+      case(#update_class_error){
+        return {
+          number = 1000;
+          text = "class could not be updated";
+          error = the_error;
+          flag_point = flag_point;
+          caller = caller;}
+      };
+      case(#nyi){
+        return {
+          number = 1999;
+          text = "not yet implemented";
+          error = the_error;
+          flag_point = flag_point;
+          caller = caller;}
+      };
+      case(#unreachable){
+        return {
+          number = 1998;
+          text = "unreachable";
+          error = the_error;
+          flag_point = flag_point;
+          caller = caller;}
+      };
+      case(#not_enough_storage){
+        return {
+          number = 1001;
+          text = "not enough storage";
+          error = the_error;
+          flag_point = flag_point;
+          caller = caller;}
+      };
+      case(#malformed_metadata){
+        return {
+          number = 1002;
+          text = "malformed metadata";
+          error = the_error;
+          flag_point = flag_point;
+          caller = caller;}
+      };
+      case(#storage_configuration_error){
+        return {
+          number = 1003;
+          text = "storage configuration error";
+          error = the_error;
+          flag_point = flag_point;
+          caller = caller;}
+      };
+
+      //2000s - access
+      case(#unauthorized_access){
+        return {
+          number = 2000;
+          text = "unauthorized access";
+          error = the_error;
+          flag_point = flag_point;
+          caller = caller;}
+      };
+
+      //3000s - escrow errors
+      case(#no_escrow_found){
+        return {
+          number = 3000;
+          text = "no escrow found";
+          error = the_error;
+          flag_point = flag_point;
+          caller = caller;}
+      };
+      case(#deposit_burned){
+        return {
+          number = 3001;
+          text = "deposit has already been burned";
+          error = the_error;
+          flag_point = flag_point;
+          caller = caller;}
+      };
+      case(#escrow_owner_not_the_owner){
+        return {
+          number = 3002;
+          text = "the owner in the escrow request does not own the item";
+          error = the_error;
+          flag_point = flag_point;
+          caller = caller;}
+      };
+      case(#validate_deposit_failed){
+        return {
+          number = 3003;
+          text = "validate deposit failed";
+          error = the_error;
+          flag_point = flag_point;
+          caller = caller;}
+      };
+      case(#validate_trx_wrong_host){
+        return {
+          number = 3004;
+          text = "validate deposit failed - wrong host";
+          error = the_error;
+          flag_point = flag_point;
+          caller = caller;}
+      };
+      case(#validate_deposit_wrong_amount){
+        return {
+          number = 3005;
+          text = "validate deposit failed - wrong amount";
+          error = the_error;
+          flag_point = flag_point;
+          caller = caller;}
+      };
+      case(#validate_deposit_wrong_buyer){
+        return {
+          number = 3006;
+          text = "validate deposit failed - wrong buyer";
+          error = the_error;
+          flag_point = flag_point;
+          caller = caller;}
+      };
+      case(#withdraw_too_large){
+        return {
+          number = 3007;
+          text = "withdraw too large";
+          error = the_error;
+          flag_point = flag_point;
+          caller = caller;}
+      };
+      case(#escrow_cannot_be_removed){
+        return {
+          number = 3008;
+          text = "escrow cannot be removed";
+          error = the_error;
+          flag_point = flag_point;
+          caller = caller;}
+      };
+      case(#escrow_withdraw_payment_failed){
+        return {
+          number = 3009;
+          text = "could not pay the escrow";
+          error = the_error;
+          flag_point = flag_point;
+          caller = caller;}
+      };
+      case(#sales_withdraw_payment_failed){
+        return {
+          number = 3010;
+          text = "could not pay the sales withdraw";
+          error = the_error;
+          flag_point = flag_point;
+          caller = caller;}
+      };
+      case(#improper_interface){
+        return {
+          number = 3800;
+          text = "improper interface";
+          error = the_error;
+          flag_point = flag_point;
+          caller = caller;}
+      };
+
+      //4000s - auction errors
+      case(#sale_not_found){
+        return {
+          number = 4000;
+          text = "sale not found";
+          error = the_error;
+          flag_point = flag_point;
+          caller = caller;}
+      };
+      case(#receipt_data_mismatch){
+        return {
+          number = 4001;
+          text = "receipt data mismatch";
+          error = the_error;
+          flag_point = flag_point;
+          caller = caller;}
+      };
+      case(#asset_mismatch){
+        return {
+          number = 4002;
+          text = "asset mismatch";
+          error = the_error;
+          flag_point = flag_point;
+          caller = caller;}
+      };
+      case(#token_id_mismatch){
+        return {
+          number = 4003;
+          text = "token ids do not match";
+          error = the_error;
+          flag_point = flag_point;
+          caller = caller;}
+      };
+      case(#bid_too_low){
+        return {
+          number = 4004;
+          text = "bid too low";
+          error = the_error;
+          flag_point = flag_point;
+          caller = caller;}
+      };
+      case(#sale_id_does_not_match){
+        return {
+          number = 4005;
+          text = "sale id does not match";
+          error = the_error;
+          flag_point = flag_point;
+          caller = caller;}
+      };
+      case(#auction_ended){
+        return {
+          number = 4006;
+          text = "auction has ended";
+          error = the_error;
+          flag_point = flag_point;
+          caller = caller;}
+      };
+      case(#sale_not_over){
+        return {
+          number = 4007;
+          text = "sale not over";
+          error = the_error;
+          flag_point = flag_point;
+          caller = caller;}
+      };
+      case(#auction_not_started){
+        return {
+          number = 4008;
+          text = "auction not started";
+          error = the_error;
+          flag_point = flag_point;
+          caller = caller;}
+      };
+      case(#token_non_transferable){
+        return {
+          number = 4009;
+          text = "token is soulbound";
+          error = the_error;
+          flag_point = flag_point;
+          caller = caller;}
+      };
+    };
+  };
+
+  public let nft_status_staged = "staged";
+  public let nft_status_minted = "minted";
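
The error numbers above are banded by failure class: values below 1000 cover staging and data problems, the 1000s cover failures in the underlying system, the 2000s cover access control, the 3000s cover escrow and interface handling, and the 4000s cover auction and market flows. A minimal caller-side sketch of how a client module might use those bands to classify an OrigynError is shown below; classify_error and its module wrapper are illustrative only and are not part of the reference canister.

// Illustrative sketch only: classify an OrigynError by the number bands used in errors() above.
// Assumes this helper sits next to types.mo, as the other reference modules do.
import Types "types";

module {
  public func classify_error(err : Types.OrigynError) : Text {
    if (err.number >= 4000) { "auction/market error" }
    else if (err.number >= 3000) { "escrow or interface error" }
    else if (err.number >= 2000) { "access error" }
    else if (err.number >= 1000) { "system error" }
    else { "staging or data error" };
  };
};
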
+ + public let metadata :{ + __system : Text; + __system_status : Text; + __system_secondary_royalty : Text; + __system_primary_royalty : Text; + __system_node : Text; + __system_originator : Text; + __system_wallet_shares : Text; + __apps :Text; + library : Text; + library_id : Text; + library_size : Text; + library_location_type: Text; + owner : Text; + id: Text; + primary_asset: Text; + preview_asset: Text; + experience_asset: Text; + hidden_asset: Text; + is_soulbound: Text; + primary_host: Text; + primary_port: Text; + primary_protcol: Text; + primary_royalties_default : Text; + royalty_broker : Text; + royalty_node : Text; + royalty_originator : Text; + royalty_network : Text; + royalty_custom : Text; + secondary_royalties_default : Text; + __apps_app_id : Text; + __system_current_sale_id : Text + } = { + __system = "__system"; + __system_status = "status"; + __system_secondary_royalty = "com.origyn.royalties.secondary"; + __system_primary_royalty = "com.origyn.royalties.primary"; + __system_node = "com.origyn.node"; + __system_originator = "com.origyn.originator"; + __system_wallet_shares = "com.origyn.wallet_shares"; + __apps = "__apps"; + library = "library"; + library_id = "library_id"; + library_size = "size"; + library_location_type = "location_type"; + owner = "owner"; + id = "id"; + primary_asset = "primary_asset"; + preview_asset = "preview_asset"; + primary_royalties_default = "com.origyn.royalties.primary.default"; + secondary_royalties_default = "com.origyn.royalties.secondary.default"; + hidden_asset = "hidden_asset"; + is_soulbound = "is_soulbound"; + primary_host = "primary_host"; + primary_port = "primary_port"; + primary_protcol = "primary_protcol"; + royalty_broker = "com.origyn.royalty.broker"; + royalty_node = "com.origyn.royalty.node"; + royalty_originator = "com.origyn.royalty.originator"; + royalty_network = "com.origyn.royalty.network"; + royalty_custom = "com.origyn.royalty.custom"; + experience_asset = "experience_asset"; + __apps_app_id = "app_id"; + __system_current_sale_id = "current_sale_id"; + }; + + + public func account_eq(a : Account, b : Account) : Bool{ + switch(a){ + case(#principal(a_principal)){ + switch(b){ + case(#principal(b_principal)){ + return a_principal == b_principal; + }; + case(#account_id(b_account_id)){ + return AccountIdentifier.toText(AccountIdentifier.fromPrincipal(a_principal, null)) == b_account_id; + }; + case(#account(b_account)){ + return AccountIdentifier.toText(AccountIdentifier.fromPrincipal(a_principal, null)) == AccountIdentifier.toText(AccountIdentifier.fromPrincipal(b_account.owner, switch(b_account.sub_account){case(null){null}; case(?val){?Blob.toArray(val)}})) ; + }; + case(#extensible(b_extensible)){ + //not implemented + return false; + }; + }; + }; + case(#account_id(a_account_id)){ + switch(b){ + case(#principal(b_principal)){ + return a_account_id == AccountIdentifier.toText(AccountIdentifier.fromPrincipal(b_principal,null)); + }; + case(#account_id(b_account_id)){ + return a_account_id == b_account_id; + }; + case(#account(b_account)){ + return a_account_id == AccountIdentifier.toText(AccountIdentifier.fromPrincipal(b_account.owner, switch(b_account.sub_account){case(null){null}; case(?val){?Blob.toArray(val)}})) ; + }; + case(#extensible(b_extensible)){ + //not implemented + return false; + } + } + }; + case(#extensible(a_extensible)){ + switch(b){ + case(#principal(b_principal)){ + return false; + }; + case(#account_id(b_account_id)){ + return false; + }; + case(#account(b_account_id)){ + return false; + }; 
+ case(#extensible(b_extensible)){ + //not implemented + return false; + } + }; + }; + case(#account(a_account)){ + switch(b){ + case(#principal(b_principal)){ + return AccountIdentifier.toText(AccountIdentifier.fromPrincipal(a_account.owner, switch(a_account.sub_account){case(null){null}; case(?val){?Blob.toArray(val)}})) == AccountIdentifier.toText(AccountIdentifier.fromPrincipal(b_principal, null)) ; + }; + case(#account_id(b_account_id)){ + return AccountIdentifier.toText(AccountIdentifier.fromPrincipal(a_account.owner, switch(a_account.sub_account){case(null){null}; case(?val){?Blob.toArray(val)}})) == b_account_id; + }; + case(#account(b_account)){ + return a_account.owner == b_account.owner and a_account.sub_account == b_account.sub_account; + }; + case(#extensible(b_extensible)){ + //not implemented + return false; + }; + }; + } + }; + }; + + + + public func token_compare (a : TokenSpec, b : TokenSpec) : Order.Order{ + /* #ic: { + canister: Principal; + standard: { + #DIP20; + #Ledger; + #ICRC1; + #EXTFungible; + } + }; + #extensible : CandyTypes.CandyValue; //#Class*/ + switch(a, b){ + case(#ic(a_token), #ic(b_token)){ + return Principal.compare(a_token.canister, b_token.canister); + }; + case(#extensible(a_token), #ic(b_token)){ + return Text.compare(Conversions.valueToText(a_token), Principal.toText(b_token.canister)); + }; + case(#ic(a_token), #extensible(b_token)){ + return Text.compare(Principal.toText(a_token.canister),Conversions.valueToText(b_token)); + }; + case(#extensible(a_token), #extensible(b_token)){ + return Text.compare(Conversions.valueToText(a_token), Conversions.valueToText(b_token)); + }; + }; + }; + + public func token_eq(a : TokenSpec, b : TokenSpec) : Bool{ + /* #ic: { + canister: Principal; + standard: { + #DIP20; + #Ledger; + #EXTFungible; + #ICRC1; + } + }; + #extensible : CandyTypes.CandyValue; //#Class*/ + switch(a){ + case(#ic(a_token)){ + switch(b){ + case(#ic(b_token)){ + + if(a_token.standard != b_token.standard){ + return false; + }; + if(a_token.canister != b_token.canister){ + return false; + }; + return true; + }; + case(#extensible(b_token)){ + //not implemented + return false; + }; + }; + }; + case(#extensible(a_token)){ + switch(b){ + case(#ic(b_token)){ + //not implemented + return false; + }; + case(#extensible(b_token)){ + //not implemented + return false; + }; + + } + }; + }; + }; + + public func account_hash(a : Account) : Nat{ + switch(a){ + case(#principal(a_principal)){ + Nat32.toNat(Principal.hash(a_principal)); + }; + case(#account_id(a_account_id)){ + Nat32.toNat(Text.hash(a_account_id)); + + }; + case(#account(a_account)){ + Nat32.toNat(Text.hash(AccountIdentifier.toText(AccountIdentifier.fromPrincipal(a_account.owner, switch(a_account.sub_account){case(null){null}; case(?val){?Blob.toArray(val)}})) )); + + }; + case(#extensible(a_extensible)){ + //unimplemnted; unsafe; probably dont use + //until a reliable valueToHash function is written + //if any redenring of classes changes the whole hash + //will change + Nat32.toNat(Text.hash(Conversions.valueToText(a_extensible))); + + }; + }; + }; + + public func account_hash_uncompressed(a : Account) : Nat{ + switch(a){ + case(#principal(a_principal)){ + NFTUtils.hashBlob(Principal.toBlob(a_principal)); + }; + case(#account_id(a_account_id)){ + + let accountBlob = switch(hex.decode(a_account_id)){ + case(#ok(item)){Blob.fromArray(item)}; + case(#err(err)){ + D.trap("Not a valid hex"); + }; + }; + NFTUtils.hashBlob(accountBlob); + }; + case(#account(a_account)){ + let account_id = 
AccountIdentifier.toText(AccountIdentifier.fromPrincipal(a_account.owner, switch(a_account.sub_account){case(null){null}; case(?val){?Blob.toArray(val)}})); + let accountBlob = switch(hex.decode(account_id)){ + case(#ok(item)){Blob.fromArray(item)}; + case(#err(err)){ + D.trap("Not a valid hex"); + }; + }; + NFTUtils.hashBlob(accountBlob); + }; + case(#extensible(a_extensible)){ + //unimplemnted; unsafe; probably dont use + //until a reliable valueToHash function is written + //if any redenring of classes changes the whole hash + //will change + NFTUtils.hashBlob(Conversions.valueToBlob(#Text(Conversions.valueToText(a_extensible)))); + }; + }; + }; + + + public func token_hash(a : TokenSpec) : Nat { + switch(a){ + case(#ic(a)){ + Nat32.toNat(Principal.hash(a.canister)); + + }; + case(#extensible(a_extensible)){ + //unimplemnted; unsafe; probably dont use + //until a reliable valueToHash function is written + //if any redenring of classes changes the whole hash + //will change + Nat32.toNat(Text.hash(Conversions.valueToText(a_extensible))); + }; + }; + + }; + + public func token_hash_uncompressed(a : TokenSpec) : Nat { + switch(a){ + case(#ic(a)){ + NFTUtils.hashBlob(Principal.toBlob(a.canister)); + + + }; + case(#extensible(a_extensible)){ + //unimplemnted; unsafe; probably dont use + //until a reliable valueToHash function is written + //if any redenring of classes changes the whole hash + //will change + NFTUtils.hashBlob(Conversions.valueToBlob(a_extensible)); + + }; + }; + + }; + + public let account_handler = (account_hash, account_eq); + + public let token_handler = (token_hash, token_eq); + + public type HTTPResponse = { + body : Blob; + headers : [HeaderField]; + status_code : Nat16; + streaming_strategy : ?StreamingStrategy; + }; + + + + public type StreamingCallback = query (StreamingCallbackToken) -> async (StreamingCallbackResponse); + + + + public type StreamingCallbackResponse = { + body : Blob; + token : ?StreamingCallbackToken; + }; + + public type StorageService = actor{ + stage_library_nft_origyn : shared (StageChunkArg, AllocationRecordStable, CandyTypes.CandyValue) -> async Result.Result; + storage_info_nft_origyn : shared query () -> async Result.Result; + chunk_nft_origyn : shared query ChunkRequest -> async Result.Result; + refresh_metadata_nft_origyn : (token_id: Text, metadata: CandyTypes.CandyValue) -> async Result.Result + }; + + public type Service = actor { + __advance_time : shared Int -> async Int; + __set_time_mode : shared { #test; #standard } -> async Bool; + balance : shared query EXT.BalanceRequest -> async BalanceResponse; + balanceEXT : shared query EXT.BalanceRequest -> async BalanceResponse; + balanceOfDip721 : shared query Principal -> async Nat; + balance_of_nft_origyn : shared query Account -> async Result.Result; + bearer : shared query EXT.TokenIdentifier -> async Result.Result; + bearerEXT : shared query EXT.TokenIdentifier -> async Result.Result; + bearer_nft_origyn : shared query Text -> async Result.Result; + bearer_batch_nft_origyn : shared query [Text] -> async [Result.Result]; + bearer_secure_nft_origyn : shared Text -> async Result.Result; + bearer_batch_secure_nft_origyn : shared [Text] -> async [Result.Result]; + canister_status : shared { + canister_id : canister_id; + } -> async canister_status; + collection_nft_origyn : (fields : ?[(Text, ?Nat, ?Nat)]) -> async Result.Result; + collection_update_nft_origyn : (ManageCollectionCommand) -> async Result.Result; + collection_update_batch_nft_origyn : ([ManageCollectionCommand]) -> async 
[Result.Result]; + cycles : shared query () -> async Nat; + getEXTTokenIdentifier : shared query Text -> async Text; + get_nat_as_token_id : shared query Nat -> async Text; + get_token_id_as_nat : shared query Text -> async Nat; + http_request : shared query HttpRequest -> async HTTPResponse; + http_request_streaming_callback : shared query StreamingCallbackToken -> async StreamingCallbackResponse; + manage_storage_nft_origyn : shared ManageStorageRequest -> async Result.Result; + market_transfer_nft_origyn : shared MarketTransferRequest -> async Result.Result; + market_transfer_batch_nft_origyn : shared [MarketTransferRequest] -> async [Result.Result]; + mint_nft_origyn : shared (Text, Account) -> async Result.Result; + nftStreamingCallback : shared query StreamingCallbackToken -> async StreamingCallbackResponse; + chunk_nft_origyn : shared query ChunkRequest -> async Result.Result; + history_nft_origyn : shared query (Text, ?Nat, ?Nat) -> async Result.Result<[TransactionRecord],OrigynError>; + nft_origyn : shared query Text -> async Result.Result; + update_app_nft_origyn : shared NFTUpdateRequest -> async Result.Result; + ownerOf : shared query Nat -> async DIP721.OwnerOfResponse; + ownerOfDIP721 : shared query Nat -> async DIP721.OwnerOfResponse; + share_wallet_nft_origyn : shared ShareWalletRequest -> async Result.Result; + sale_nft_origyn : shared ManageSaleRequest -> async Result.Result; + sale_info_nft_origyn : shared SaleInfoRequest -> async Result.Result; + stage_library_nft_origyn : shared StageChunkArg -> async Result.Result; + stage_nft_origyn : shared { metadata : CandyTypes.CandyValue } -> async Result.Result; + storage_info_nft_origyn : shared query () -> async Result.Result; + transfer : shared EXT.TransferRequest -> async EXT.TransferResponse; + transferEXT : shared EXT.TransferRequest -> async EXT.TransferResponse; + transferFrom : shared (Principal, Principal, Nat) -> async DIP721.Result; + transferFromDip721 : shared (Principal, Principal, Nat) -> async DIP721.Result; + whoami : shared query () -> async Principal; + }; + + +} \ No newline at end of file diff --git a/src/origyn_nft_reference/utils.mo b/src/origyn_nft_reference/utils.mo new file mode 100644 index 0000000..3769fb6 --- /dev/null +++ b/src/origyn_nft_reference/utils.mo @@ -0,0 +1,316 @@ + +import AccountIdentifier "mo:principalmo/AccountIdentifier"; +import Array "mo:base/Array"; +import Blob "mo:base/Blob"; +import Candy "mo:candy_0_1_10/types"; +import CandyTypes "mo:candy_0_1_10/types"; +import Prelude "mo:base/Prelude"; +import Workspace "mo:candy_0_1_10/workspace"; +import Conversions "mo:candy_0_1_10/conversion"; +import D "mo:base/Debug"; +import Nat32 "mo:base/Nat32"; +import Nat "mo:base/Nat"; +import Iter "mo:base/Iter"; +import Hash "mo:base/Hash"; +import List "mo:base/List"; +import Option "mo:base/Option"; +import Principal "mo:base/Principal"; +import Properties "mo:candy_0_1_10/properties"; +import Result "mo:base/Result"; +import Text "mo:base/Text"; +import Order "mo:base/Order"; +import Time "mo:base/Time"; +import TrieMap "mo:base/TrieMap"; +import Types "types"; +import Char "mo:base/Char"; +import Buffer "mo:base/Buffer"; +import SB "mo:stablebuffer_0_2_0/StableBuffer"; +import SHA256 "mo:crypto/SHA/SHA256"; + + +module { + + + public func get_nat_as_token_id(tokenNat : Nat) : Text { + D.print("nat as token"); + D.print(debug_show(Conversions.natToBytes(tokenNat))); + + var staged = Conversions.natToBytes(tokenNat); + let stagedBuffer = CandyTypes.toBuffer(staged); + let prefixBuffer = 
if(staged.size() % 4 == 0){CandyTypes.toBuffer([])} + else if(staged.size() % 4 == 1){CandyTypes.toBuffer([0,0,0])} + else if(staged.size() % 4 == 2){CandyTypes.toBuffer([0,0])} + else {CandyTypes.toBuffer([0])}; + + prefixBuffer.append(stagedBuffer); + return Conversions.bytesToText((prefixBuffer.toArray())); + }; + + public func get_token_id_as_nat(token_id : Text) : Nat{ + D.print("token as nat:" # token_id); + D.print(debug_show(Conversions.textToBytes(token_id))); + Conversions.bytesToNat(Conversions.textToBytes(token_id)); + }; + + public func is_owner_manager_network(state :Types.State, caller: Principal) : Bool{ + + //debug {D.print("checking if " # Principal.toText(caller) # " is network:" # debug_show(state.state.collection_data.network) # " owner: " # debug_show(state.state.collection_data.owner) # " manager: " # debug_show(state.state.collection_data.managers))}; + if(caller == state.state.collection_data.owner){return true;}; + if(Array.filter(state.state.collection_data.managers, func(item : Principal){item == caller}).size() > 0){return true;}; + if(Option.make(caller) == state.state.collection_data.network){return true;}; + + return false; + }; + + public func is_owner_network(state :Types.State, caller: Principal) : Bool{ + if(caller == state.state.collection_data.owner){return true;}; + if(Option.make(caller) == state.state.collection_data.network){return true;}; + return false; + }; + + public func add_log(state: Types.State, entry : Types.LogEntry){ + if(SB.size(state.state.log) >= 1000){ + SB.add<[Types.LogEntry]>(state.state.log_history, SB.toArray(state.state.log)); + state.state.log := SB.initPresized(1000); + }; + SB.add(state.state.log, entry); + }; + + public func get_auction_state_from_status(current_sale : Types.SaleStatus ) : Result.Result { + + switch(current_sale.sale_type) { + case(#auction(state)){ + #ok(state); + }; + /* case(_){ + return #err(Types.errors(#nyi, "bid_nft_origyn - sales state not implemented " # current_sale.sale_id, null)); + }; */ + }; + }; + + public func get_auction_state_from_statusStable(current_sale : Types.SaleStatusStable ) : Result.Result { + + switch(current_sale.sale_type) { + case(#auction(state)){ + #ok(state); + }; + /* case(_){ + return #err(Types.errors(#nyi, "bid_nft_origyn - sales state not implemented " # current_sale.sale_id, null)); + }; */ + }; + }; + + + + + + public func build_library(items: [(Text,[(Text,CandyTypes.AddressedChunkArray)])]) : TrieMap.TrieMap>{ + + let aMap = TrieMap.TrieMap>(Text.equal,Text.hash); + for(this_item in items.vals()){ + let bMap = TrieMap.TrieMap(Text.equal,Text.hash); + for(thatItem in this_item.1.vals()){ + bMap.put(thatItem.0, Workspace.fromAddressedChunks(thatItem.1)); + }; + aMap.put(this_item.0, bMap); + }; + + return aMap; + }; + + public func compare_library(x : (Text, Text), y: (Text, Text)) : Order.Order { + let a = Text.compare(x.0, y.0); + switch(a){ + case(#equal){ + return Text.compare(x.1,y.1); + }; + case(_){ + return a; + }; + }; + }; + + public func library_equal(x : (Text, Text), y: (Text, Text)) : Bool { + + switch(compare_library(x, y)){ + case(#equal){ + return true; + }; + case(_){ + return false; + }; + }; + }; + + public func library_hash(x : (Text, Text)) : Nat { + return Nat32.toNat(Text.hash("token_id" # x.0 # "library_id" # x.1)); + + }; + + public func get_deposit_info(depositor_account : Types.Account, host: Principal) : Types.SubAccountInfo{ + D.print("getting deposit info"); + get_subaccount_info("com.origyn.nft.deposit", depositor_account, host); + }; 
+ + + public func get_escrow_account_info(request : Types.EscrowReceipt, host: Principal) : Types.SubAccountInfo{ + + D.print("Getting escrow account"); + let h = SHA256.New(); + h.write(Conversions.valueToBytes(#Text("com.origyn.nft.escrow"))); + h.write(Conversions.valueToBytes(#Text("buyer"))); + h.write(Conversions.valueToBytes(#Nat(Types.account_hash_uncompressed(request.buyer)))); + h.write(Conversions.valueToBytes(#Text("seller"))); + h.write(Conversions.valueToBytes(#Nat(Types.account_hash_uncompressed(request.seller)))); + h.write(Conversions.valueToBytes(#Text(("tokenid")))); + h.write(Conversions.valueToBytes(#Text(request.token_id))); + h.write(Conversions.valueToBytes(#Text("ledger"))); + h.write(Conversions.valueToBytes(#Nat(Types.token_hash_uncompressed(request.token)))); + let sub_hash =h.sum([]); + + let to = AccountIdentifier.addHash(AccountIdentifier.fromPrincipal(host, ?sub_hash)); + + return { + principal = host; + account_id_text = AccountIdentifier.toText(to); + account_id = Blob.fromArray(to); + account = { + principal = host; + sub_account = (Blob.fromArray(sub_hash)); + } + }; + }; + + public func hash_blob(item: Blob) : Nat{ + let h = SHA256.New(); + h.write(Blob.toArray(item)); + let sub_hash =h.sum([]); + return Conversions.valueToNat(#Bytes(#frozen(sub_hash))); + }; + + public func get_sale_account_info(request : Types.EscrowReceipt, host: Principal) : Types.SubAccountInfo{ + + let h = SHA256.New(); + h.write(Conversions.valueToBytes(#Nat32(Text.hash("com.origyn.nft.sale")))); + h.write(Conversions.valueToBytes(#Nat32(Text.hash("buyer")))); + h.write(Conversions.valueToBytes(#Nat(Types.account_hash_uncompressed(request.buyer)))); + h.write(Conversions.valueToBytes(#Nat32(Text.hash("seller")))); + h.write(Conversions.valueToBytes(#Nat(Types.account_hash_uncompressed(request.seller)))); + h.write(Conversions.valueToBytes(#Nat32(Text.hash("tokenid")))); + h.write(Conversions.valueToBytes(#Text(request.token_id))); + h.write(Conversions.valueToBytes(#Nat32(Text.hash("ledger")))); + h.write(Conversions.valueToBytes(#Nat(Types.token_hash_uncompressed(request.token)))); + let sub_hash =h.sum([]); + + let to = AccountIdentifier.addHash(AccountIdentifier.fromPrincipal(host, ?sub_hash)); + + + return { + principal = host; + account_id_text = AccountIdentifier.toText(to); + account_id = Blob.fromArray(to); + account = { + principal = host; + sub_account =(Blob.fromArray(sub_hash)); + } + }; + }; + + private func get_subaccount_info(prefix: Text, account : Types.Account, host: Principal) : Types.SubAccountInfo{ + D.print("in get subaccount"); + switch(account){ + case(#principal(principal)){ + let buffer = CandyTypes.toBuffer(Blob.toArray(Text.encodeUtf8(prefix # ".principal"))); + buffer.append(CandyTypes.toBuffer(Blob.toArray(Principal.toBlob(principal)))); + + let h = SHA256.New(); + h.write(buffer.toArray()); + let sha = h.sum([]); + + + let to = AccountIdentifier.addHash(AccountIdentifier.fromPrincipal(host, ?sha)); + + + return { + principal = host; + account_id_text = AccountIdentifier.toText(to); + account_id = Blob.fromArray(to); + account = { + principal = host; + sub_account = Blob.fromArray(sha); + } + }; + }; + case(#account(account)){ + let buffer = CandyTypes.toBuffer(Blob.toArray(Text.encodeUtf8(prefix # ".account"))); + buffer.append(CandyTypes.toBuffer(Blob.toArray(Principal.toBlob(account.owner)))); + switch(account.sub_account){ + case(null){}; + case(?val){ + buffer.append(CandyTypes.toBuffer(Blob.toArray(val))); + + } + }; + + let h = SHA256.New(); + 
h.write(buffer.toArray()); + let sha = h.sum([]); + + + let to = AccountIdentifier.addHash(AccountIdentifier.fromPrincipal(host, ?sha)); + + return { + principal = host; + account_id_text = AccountIdentifier.toText(to); + account_id = Blob.fromArray(to); + account = { + principal = host; + sub_account = Blob.fromArray(sha); + } + }; + }; + case(#account_id(account_id)){ + let buffer = CandyTypes.toBuffer(Blob.toArray(Text.encodeUtf8(prefix # ".accountid"))); + switch(AccountIdentifier.fromText(account_id)){ + case(#ok(accountblob)){ + buffer.append(CandyTypes.toBuffer((AccountIdentifier.addHash(accountblob)))); + + }; + case(#err(err)){ + + }; + }; + + let h = SHA256.New(); + h.write(buffer.toArray()); + let sha = h.sum([]); + + + let to = AccountIdentifier.addHash(AccountIdentifier.fromPrincipal(host, ?sha)); + + + return { + principal = host; + account_id_text = AccountIdentifier.toText(to); + account_id = Blob.fromArray(to); + account = { + principal = host; + sub_account = Blob.fromArray(sha); + } + }; + }; + case(#extensible(data)){ + return Prelude.nyi(); //cant implement until candy has stable hash + } + }; + }; + + + + + + + + + +} \ No newline at end of file diff --git a/src/origyn_sale_reference/main.mo b/src/origyn_sale_reference/main.mo new file mode 100644 index 0000000..358757f --- /dev/null +++ b/src/origyn_sale_reference/main.mo @@ -0,0 +1,1834 @@ +import Array "mo:base/Array"; +import Buffer "mo:base/Buffer"; +import CandyTypes "mo:candy_0_1_10/types"; +import D "mo:base/Debug"; +import Deque "mo:base/Deque"; +import Error "mo:base/Error"; +import Iter "mo:base/Iter"; +import List "mo:base/List"; +import Map "mo:map_6_0_0/Map"; +import NFTTypes "../origyn_nft_reference/types"; +import NFTUtils "../origyn_nft_reference/utils"; +import Nat "mo:base/Nat"; +import Nat32 "mo:base/Nat32"; +import Option "mo:base/Option"; +import Order "mo:base/Order"; +import Principal "mo:base/Principal"; +import RBU "mo:base/RBTree"; +import Result "mo:base/Result"; +import Text "mo:base/Text"; +import Time "mo:base/Time"; +import Types "types"; + + +//this is an alpha canister provided as an example of how one could +//run a sale using the NFT SaleCanister +//comments and documentation are pending + +shared (deployer) actor class SaleCanister(__initargs : Types.InitArgs) = this { + + + stable var __time_mode : {#test; #standard;} = #standard; + private var __test_time : Int = 0; + + private func get_time() : Int{ + switch(__time_mode){ + case(#standard){return Time.now();}; + case(#test){return __test_time;}; + }; + + }; + + //D.print("instantiating sales canister"); + + + stable var state : Types.State = { + var owner : Principal = __initargs.owner; + var manager : ?Principal = null; + var nft_inventory : Types.NFTInventory = Map.new(); + var nft_group : Types.Groups = Map.new(); + var nft_group_size : Nat = 0; + var nft_reservation : Types.Reservations = Map.new(); + var nft_reservation_size : Nat = 0; + var user_allocations : Types.Allocations = Map.new(); + var user_registrations : Types.Registrations = Map.new(); + var user_purchases: Types.Purchases = Map.new>(); + var allocation_expiration : Int = __initargs.allocation_expiration; + var nft_gateway : ?Principal = __initargs.nft_gateway; + var sale_open_date = __initargs.sale_open_date; + var registration_date = __initargs.registration_date; + var end_date = __initargs.end_date; + var required_lock_date = __initargs.required_lock_date; + var allocation_queue : Deque.Deque<(Principal, Int)> = Deque.empty<(Principal, Int)>(); + }; + + + 
// var nft_group : Types.Groups = Map.new(); + // var nft_group_size : Nat = 0; + + private var DAY_LENGTH = 60 * 60 * 24 * 10 ** 9; + + let ledger_principal : Principal = Principal.fromText("dw5hj-fcc4h-22h5p-zdkx2-3byeo-f2vf3-jv5sa-gckmc-mtnss-zojch-oqe"); + + let alice_seller : Principal = Principal.fromText("u74sm-wx4yh-capur-xnz4w-orbcn-l3jlc-m65rb-ue5ah-mqyvz-fmvvc-tae"); + + let jess_buyer : Principal = Principal.fromText("3j2qa-oveg3-2agc5-735se-zsxjj-4n65k-qmnse-byzkf-4xhw5-mzjxe-pae"); + + let timestamp = Time.now(); + + let one_month_nanos : Int= 2628000000000000; + let max_time_nanos : Int = 18653431178000000000; + + public shared(msg) func manage_sale_nft_origyn(command : Types.ManageCommand) : async Result.Result{ + switch(command){ + case(#UpdateOwner(val)){ + if(msg.caller == state.owner){ + state.owner := val; + return #ok(true); + } else { + return #err(Types.errors(#unauthorized_access, "manage_sale_nft_origyn only owner can manage sale canister", ?msg.caller)) + }; + }; + case(#UpdateAllocationExpiration(val)){ + if(msg.caller == state.owner){ + if(val > one_month_nanos){ + return #err(Types.errors(#bad_date, "manage_sale_nft_origyn cannot hold deposit for more than one month", ?msg.caller)) + }; + state.allocation_expiration := val; + return #ok(true); + } else { + return #err(Types.errors(#unauthorized_access, "manage_sale_nft_origyn only owner can manage sale canister", ?msg.caller)) + }; + }; + case(#UpdateNFTGateway(val)){ + if(msg.caller == state.owner){ + state.nft_gateway := val; + return #ok(true); + } else { + return #err(Types.errors(#unauthorized_access, "manage_sale_nft_origyn only owner can manage sale canister", ?msg.caller)) + }; + }; + case(#UpdateSaleOpenDate(val)){ + if(msg.caller == state.owner){ + switch(val){ + case(?val){ + if(val > max_time_nanos or val < get_time() - one_month_nanos){ + return #err(Types.errors(#bad_date, "manage_sale_nft_origyn sale open date not in a viable range", ?msg.caller)) + }; + }; + case(null){}; + }; + state.sale_open_date := val; + return #ok(true); + } else { + return #err(Types.errors(#unauthorized_access, "manage_sale_nft_origyn only owner can manage sale canister", ?msg.caller)) + }; + }; + case(#UpdateRegistrationDate(val)){ + if(msg.caller == state.owner){ + switch(val){ + case(?val){ + if(val > max_time_nanos or val < get_time() - one_month_nanos){ + return #err(Types.errors(#bad_date, "manage_sale_nft_origyn sale open date not in a viable range", ?msg.caller)) + }; + }; + case(null){}; + }; + state.registration_date := val; + return #ok(true); + } else { + return #err(Types.errors(#unauthorized_access, "manage_sale_nft_origyn only owner can manage sale canister", ?msg.caller)) + }; + }; + case(#UpdateEndDate(val)){ + if(msg.caller == state.owner){ + switch(val){ + case(?val){ + if(val > max_time_nanos or val < get_time() - one_month_nanos){ + return #err(Types.errors(#bad_date, "manage_sale_nft_origyn sale open date not in a viable range", ?msg.caller)) + }; + }; + case(null){}; + }; + state.end_date := val; + return #ok(true); + } else { + return #err(Types.errors(#unauthorized_access, "manage_sale_nft_origyn only owner can manage sale canister", ?msg.caller)) + }; + }; + case(#UpdateLockDate(val)){ + if(msg.caller == state.owner){ + switch(val){ + case(?val){ + if(val > max_time_nanos or val < get_time() - one_month_nanos){ + return #err(Types.errors(#bad_date, "manage_sale_nft_origyn sale open date not in a viable range", ?msg.caller)) + }; + }; + case(null){}; + }; + state.required_lock_date := val; + 
return #ok(true); + } else { + return #err(Types.errors(#unauthorized_access, "manage_sale_nft_origyn only owner can manage sale canister", ?msg.caller)) + }; + } + + } + }; + + + public query(msg) func get_metrics_sale_nft_origyn() : async Result.Result{ + return #ok{ + owner = state.owner; + allocation_expiration = state.allocation_expiration; + nft_gateway = state.nft_gateway; + sale_open_date = state.sale_open_date; + registration_date = state.registration_date; + end_date = state.end_date; + }; + + }; + + + + // Retrieves a list of groups for a particular user or address + public query(msg) func get_groups() : async Result.Result{ + // ToDo: + // Are the amounts in cycles? + return #ok([ + { + namespace = "alpha"; + pricing = ?[ + #cost_per({ + amount = 100_000_000; + token = #ic({ + canister = ledger_principal; + fee = 200000; + symbol = "DIP"; + decimals = 8; + standard = #DIP20; + }); + }) + ]; + allowed_amount = ?5; + } + ]); + // return #err(Types.errors(#nyi, "manage_nfts nyi", ?msg.caller)); + }; + + // We probably don't need this + public shared(msg) func get_escrow() : async Result.Result{ + // ToDo: + // Need to add more realistic data, the first goal was to spill the correct structure + // Are we using the correct txn #escrow_deposit? + // Find out the how to hardcode candytype and uncomment from here and from types nft_reference + + + + return #ok({ + receipt = { + amount = 100_000_000; + seller = #principal(alice_seller); + buyer = #principal(jess_buyer); + token_id = "OG1"; + token = #ic({ + canister = ledger_principal; + fee = 200000; + symbol = "DIP"; + decimals = 8; + standard = #DIP20; + }); + + }; + balance = 100_000_000_000; + transaction = { + token_id = "OG1"; + index = 2; + txn_type = #escrow_deposit({ + seller = #principal(alice_seller); + buyer = #principal(jess_buyer); + token = #ic({ + canister = ledger_principal; + fee = 200000; + symbol = "DIP"; + decimals = 8; + standard = #DIP20; + }); + token_id = "OG1"; + amount = 100_000_000; + trx_id = #nat(10000000); + extensible = #Bool(false); + }); + timestamp = timestamp; + }; + }); + // return #err(Types.errors(#nyi, "manage_nfts nyi", ?msg.caller)); + }; + + // Allows the adding/removing of inventory items + //made this a batch process so that adding NFT items doesn't take all day //need to test max add + public shared(msg) func manage_nfts_sale_nft_origyn(request: [Types.ManageNFTRequest]) : async Result.Result{ + + // ToDo: + // Need to add better error catching here - trying to get something workable + + //D.print("in manage nft " # debug_show(msg.caller, state.owner)); + + if(msg.caller != state.owner){ + return #err(Types.errors(#unauthorized_access, "manage_nfts only owner can manage nfts", ?msg.caller)) + }; + let results = Buffer.Buffer(request.size()); + for(this_request in request.vals()){ + switch(this_request){ + + case(#add(val)){ + //search for existing + switch(Map.get(state.nft_inventory, Map.thash, val.token_id)){ + case(null){ + + Map.set(state.nft_inventory, Map.thash, val.token_id, { + canister = val.canister; + token_id = val.token_id; + var available = true; + var sale_block = null; + var allocation = null; + var reservations = Map.new(); // + }); + results.add(#add(val.token_id)); + }; + case(?val){ + results.add(#err(val.token_id, Types.errors(#inventory_item_exists, "token exists in sales canister " # val.token_id, ?msg.caller))); + }; + }; + + }; + case(#remove(val)){ + //search for existing + switch(Map.get(state.nft_inventory, Map.thash, val)){ + case(null){ + results.add(#err(val, 
Types.errors(#inventory_item_does_not_exists, "token does not exists in sales canister " # val, ?msg.caller))); + }; + case(?val){ + + Map.delete(state.nft_inventory, Map.thash, val.token_id); + results.add(#remove(val.token_id)); + }; + }; + }; + }; + }; + + return #ok({ + total_size = Map.size(state.nft_inventory); + items = results.toArray() + }); + }; + + + // Allows the creator to create and manage groups. These groups can be allocated a certain number of NFTs + // and/or have special pricing based on the number of nfts they buy + public shared(msg) func manage_group_sale_nft_origyn(request: Types.ManageGroupRequest) : async Types.ManageGroupResponse{ + + // ToDo: + // Add redemptions_size + // How to add allowed_amount without error from each case + // Could not add members: SB.StableBuffer ( had an error ) + // Test from test_runner_sale + + if(msg.caller != state.owner){ + return [ + #err(Types.errors(#unauthorized_access, "manage_group_sale_nft_origyn only owner can manage groups", ?msg.caller))]; + }; + + let results = Buffer.Buffer(request.size()); + + for(this_item in request.vals()){ + // redemptions_size : Nat; + + + + switch(this_item){ + case(#update(val)){ + //D.print("manage_group_sale_nft_origyn" # "\n" #"add : " # debug_show(val.namespace) ); + switch(Map.get(state.nft_group, Map.thash, val.namespace)){ + case(null){ + let thisGroup = { + namespace = val.namespace; + var members = switch(val.members){ + case(null){Map.new()}; + case(?members){ + var tree = Map.new(); + for(this_item in members.vals()){ + Map.set(tree, Map.phash, this_item, get_time()); + }; + tree + } + }; + var redemptions = Map.new(); + var pricing = switch(val.pricing){ + case(null){[]}; + case(?pricing){pricing}; + }; + var allowed_amount = val.allowed_amount; + var tier = val.tier; + var additive = val.additive; + }; + state.nft_group_size += 1; + Map.set(state.nft_group, Map.thash, val.namespace, thisGroup); + results.add(#update(#ok(Types.group_stabalize(thisGroup)))); + }; + case(?found){ + + switch(val.pricing){ + case(null){}; + case(?pricing){found.pricing := pricing}; + }; + found.allowed_amount := val.allowed_amount; + found.additive := val.additive; + found.tier := val.tier; + switch(val.members){ //if provided replaces the members + case(null){}; + case(?members){ + var tree = Map.new(); + for(this_item in members.vals()){ + Map.set(tree, Map.phash, this_item, get_time()); + }; + found.members := tree; + }; + }; + + results.add(#update(#ok(Types.group_stabalize(found)))); + }; + }; + }; + case(#remove(val)){ + //D.print("manage_group_sale_nft_origyn" # "\n" # "remove : " # debug_show(val.namespace)); + switch(Map.get(state.nft_group, Map.thash, val.namespace)){ + case(null){ + results.add(#remove(#err(Types.errors(#group_item_does_not_exists, "does not exists in sales canister " # val.namespace, ?msg.caller)))); + }; + case(?val){ + state.nft_group_size -= 1; + Map.delete(state.nft_group, Map.thash, val.namespace); + results.add(#remove(#ok(val.namespace))); + }; + }; + + }; + case(#addMembers(val)){ + //D.print("manage_group_sale_nft_origyn" # "\n" # "addMembers : " # debug_show(val.namespace)); + + let res = Map.get(state.nft_group, Map.thash, val.namespace); + switch(res){ + case(null){ + results.add(#addMembers(#err(Types.errors(#group_item_does_not_exists, "does not exists in sales canister " # val.namespace, ?msg.caller)))); + }; + case(?v){ + //let membersToBe = Buffer.Buffer(0); + for (i in val.members.vals()){ + Map.set(v.members, Map.phash, i, get_time()); + }; + + + /* 
D.print("manage_group_sale_nft_origyn" # "\n" # + "addMembers : " # debug_show(val.namespace) # "\n\n" # + "res : " # debug_show(res) # "\n\n" # + "val.members : " # debug_show(val.members) # "\n\n" # + "res.members : " # debug_show(v.members) # "\n\n" # + //"MEMBERS TO BE : " # debug_show(membersToBe.toArray()) # "\n\n" # + "state.nft_group : " # debug_show(state.nft_group) # "\n\n" + ); */ + results.add(#addMembers(#ok((val.members.size(), Map.size(v.members))))); + }; + }; + + }; + case(#removeMembers(val)){ + let res = Map.get(state.nft_group, Map.thash, val.namespace); + switch(res){ + case(null){ + results.add(#removeMembers(#err(Types.errors(#group_item_does_not_exists, "does not exists in sales canister " # val.namespace, ?msg.caller)))); + }; + case(?v){ + //let membersToBe = Buffer.Buffer(0); + for (i in val.members.vals()){ + Map.delete(v.members, Map.phash, i); + }; + + + /* D.print("manage_group_sale_nft_origyn" # "\n" # + "addMembers : " # debug_show(val.namespace) # "\n\n" # + "res : " # debug_show(res) # "\n\n" # + "val.members : " # debug_show(val.members) # "\n\n" # + "res.members : " # debug_show(v.members) # "\n\n" # + //"MEMBERS TO BE : " # debug_show(membersToBe.toArray()) # "\n\n" # + "state.nft_group : " # debug_show(state.nft_group) # "\n\n" + ); */ + results.add(#removeMembers(#ok((val.members.size(), Map.size(v.members))))); + }; + + }; + + }; + }; + }; + + + + return results.toArray(); + // return #err(Types.errors(#nyi, "manage_group_sale_nft_origyn nyi", ?msg.caller)); + }; + + // Allows a creator to associate a set of nfts with a particular group or address + public shared(msg) func manage_reservation_sale_nft_origyn(request: [Types.ManageReservationRequest]) : async Result.Result{ + + //todo: we really need to moniter the ingress size here and put some limits in...inspect message would be awesome + if(msg.caller != state.owner){ + return #err(Types.errors(#unauthorized_access, "manage_reservation only owner can manage reservations", ?msg.caller)) + }; + + var namespace : Text = ""; + var reservation_type : Types.ReservationType = #Principal(jess_buyer); + var exclusive : Bool = false; + var nfts_size : Nat = 0; + + let results = Buffer.Buffer(request.size()); + + for(this_item in request.vals()){ + switch(this_item){ + case(#add(val)){ + //D.print("manage_reservation" # "\n" #"add : " # debug_show(val.namespace)); + switch(Map.get(state.nft_reservation, Map.thash, val.namespace)){ + case(null){ + state.nft_reservation_size += 1; + Map.set(state.nft_reservation, Map.thash, val.namespace, val); + namespace := val.namespace; + reservation_type := val.reservation_type; + exclusive := val.exclusive; + nfts_size := val.nfts.size(); + results.add(#add(val.namespace)); + }; + case(?val){ + return #err(Types.errors(#reservation_item_exists, "group exists in sales canister " # val.namespace, ?msg.caller)); + }; + }; + }; + case(#remove(val)){ + //D.print("manage_reservation" # "\n" # "remove : " # debug_show(val.namespace)); + switch(Map.get(state.nft_reservation, Map.thash, val.namespace)){ + case(null){ + return #err(Types.errors(#reservation_item_does_not_exists, "does not exists in sales canister " # val.namespace, ?msg.caller)); + }; + case(?val){ + state.nft_reservation_size -= 1; + Map.delete(state.nft_reservation, Map.thash, val.namespace); + namespace := "removed -> " # val.namespace; + results.add(#add(val.namespace)); + }; + }; + }; + case(#addNFTs(val)){ + //D.print("manage_reservation" # "\n" #"addNFTs : " # debug_show(val.namespace)); + + let res = 
Map.get(state.nft_reservation, Map.thash, val.namespace); + switch(res){ + case(null){ + return #err(Types.errors(#reservation_item_does_not_exists, "does not exists in sales canister " # val.namespace, ?msg.caller)); + }; + case(?v){ + + let nftsToBe = Buffer.Buffer(0); + for (i in v.nfts.vals()){ + nftsToBe.add(i); + }; + for(this_item in val.nfts.vals()){ + var add = true; + label search for(thatItem in v.nfts.vals()){ + if(this_item == thatItem){ + add := false; + break search; + }; + }; + if(add == true){ + nftsToBe.add(this_item); + }; + }; + let nftsArray = nftsToBe.toArray(); + + let insert = { + namespace = v.namespace; + reservation_type = v.reservation_type; + exclusive = v.exclusive; + nfts = nftsArray; + }; + Map.set( + state.nft_reservation, + Map.thash, + val.namespace, + insert + ); + namespace := v.namespace; + reservation_type := v.reservation_type; + exclusive := v.exclusive; + nfts_size := nftsArray.size(); + + results.add(#addNFTs(nfts_size)); + + /* D.print("manage_reservation" # "\n" # + "addNFTs : " # debug_show(val.namespace) # "\n\n" # + "res : " # debug_show(res) # "\n\n" # + "val.nfts : " # debug_show(val.nfts) # "\n\n" # + "res.nfts : " # debug_show(v.nfts) # "\n\n" # + "NFTs to be : " # debug_show(nftsToBe.toArray()) # "\n\n" # + "state.nft_reservation : " # debug_show(state.nft_reservation) # "\n\n" + ); */ + }; + }; + }; + case(#removeNFTs(val)){ + //D.print("manage_reservation" # "\n" #"removeNFTs : " # debug_show(val.namespace)); + + let res = Map.get(state.nft_reservation, Map.thash, val.namespace); + switch(res){ + case(null){ + return #err(Types.errors(#reservation_item_does_not_exists, "does not exists in sales canister " # val.namespace, ?msg.caller)); + }; + case(?v){ + let nftsToBe = Buffer.Buffer(0); + + for(this_item in v.nfts.vals()){ + var add = true; + label search for(thatItem in val.nfts.vals()){ + if(this_item == thatItem){ + add := false; + break search; + }; + }; + if(add == true){ + nftsToBe.add(this_item); + }; + }; + + let nftsArray = nftsToBe.toArray(); + let insert = { + namespace = v.namespace; + reservation_type = v.reservation_type; + exclusive = v.exclusive; + nfts = nftsArray; + }; + Map.set( + state.nft_reservation, + Map.thash, + val.namespace, + insert + ); + namespace := v.namespace; + reservation_type := v.reservation_type; + exclusive := v.exclusive; + nfts_size := nftsArray.size(); + results.add(#removeNFTs(nfts_size)); + + /* D.print("manage_reservation" # "\n" # + "removeNFTs : " # debug_show(val.namespace) # "\n\n" # + "res : " # debug_show(res) # "\n\n" # + "val.nfts : " # debug_show(val.nfts) # "\n\n" # + "res.nfts : " # debug_show(v.nfts) # "\n\n" # + "NFTs to be : " # debug_show(nftsToBe.toArray()) # "\n\n" # + "state.nft_reservation : " # debug_show(state.nft_reservation) # "\n\n" + ); */ + }; + }; + }; + case(#update_type(val)){ + //D.print("manage_reservation" # "\n" #"update_type : " # debug_show(val.namespace)); + let res = Map.get(state.nft_reservation, Map.thash, val.namespace); + + switch(res){ + case(null){ + return #err(Types.errors(#reservation_item_does_not_exists, "does not exists in sales canister " # val.namespace, ?msg.caller)); + }; + case(?v){ + let insert = { + namespace = v.namespace; + reservation_type = val.reservation_type; + exclusive = v.exclusive; + nfts = v.nfts; + }; + Map.set( + state.nft_reservation, + Map.thash, + val.namespace, + insert + ); + namespace := v.namespace; + reservation_type := val.reservation_type; + exclusive := v.exclusive; + nfts_size := v.nfts.size(); + + 
results.add(#update_type(namespace)); + + /* D.print("manage_reservation" # "\n" # + "update_type : " # debug_show(val.namespace) # "\n\n" # + "res : " # debug_show(res) # "\n\n" # + "state.nft_reservation : " # debug_show(state.nft_reservation) # "\n\n" + ); */ + }; + }; + + }; + + }; + }; + + return #ok({ + total_size = results.size(); + items = results.toArray(); + }); + // return #err(Types.errors(#nyi, "manage_reservation nyi", ?msg.caller)); + }; + + private func get_groups_for_user(user: Principal, groups : Types.Groups) : [Types.Group]{ + //D.print("in get groups" # debug_show(groups)); + var results = RBU.RBTree(Text.compare); + + for(thisGroup in Map.entries(groups)){ + //D.print("looking for " # debug_show(user, thisGroup.1.members) # " in " # thisGroup.1.namespace); + if(thisGroup.1.namespace == "" and Map.size(thisGroup.1.members) == 0){ + results.put(thisGroup.1.namespace, thisGroup.1); + } else { + switch(Map.get(thisGroup.1.members, Map.phash, user)){ + case(?val){ + + results.put(thisGroup.1.namespace, thisGroup.1); + }; + case(null){}; + } + }; + }; + + //D.print("d " # debug_show(Iter.size(results.entries()))); + + return Iter.toArray(Iter.map<(Text,Types.Group),Types.Group>(results.entries(), func(item){item.1})); + }; + + private func intersect_user_groups_reservations(user : Principal, groups: Types.Groups, reservations: Types.Reservations) : { + groups: [Types.Group]; + group_reservations: [Types.Reservation]; + personal_reservations: [Types.Reservation] + }{ + //D.print("in intersect"); + let user_groups = get_groups_for_user(user, groups); + //D.print("have user groups" # debug_show(user_groups)); + + + //todo: look through reservations + let personal_reservations = Buffer.Buffer(1); + let group_reservations = Buffer.Buffer(1); + + //D.print("testing reservations" # debug_show(reservations)); + for(thisRes in Map.vals(reservations)){ + switch(thisRes.reservation_type){ + case(#Principal(a_user)){ + //D.print("testing principal"); + if(a_user == user){ + personal_reservations.add(thisRes); + } + }; + case(#Groups(a_group)){ + //D.print("testing Group"); + for(thisGroup in a_group.vals()){ + let search = Array.filter(user_groups, func(a){a.namespace == thisGroup}); + if(search.size() > 0){ + group_reservations.add(thisRes); + } + }; + } + }; + }; + + return{ + groups = user_groups; + personal_reservations = personal_reservations.toArray(); + group_reservations = group_reservations.toArray(); + }; + }; + + + private func calc_user_purchase_graph(user : Principal, groups: Types.Groups, reservations: Types.Reservations, inventory: Types.NFTInventory, purchases : Types.Purchases) : { + prices: [(?Types.TokenSpec, ?Nat, [(Nat, ?Nat)])]; //token, max_allowed, (amount, number) + personal_reservations: ([Types.Reservation], Nat, Nat); + group_reservations: ([Types.Reservation], Nat, Nat); + purchases: [(Text, NFTTypes.TransactionRecord)]; + }{ + //D.print("creating graph for " #debug_show(user, groups, reservations, inventory, purchases)); + + //D.print("reservation deatil " #debug_show(reservations)); + let user_info = intersect_user_groups_reservations(user, groups, reservations); + //D.print("have info for " #debug_show(user_info)); + + //this collection keeps track of the max allwed for the user and the breakup of prices if they get a price break + // ie Max allowed: 4; pricies [(20OGY, 2 items),(30OGY, 2 items)] + type tracker = { + var max_allowed : ?Nat; + var prices: Map.Map; //price amount , number, can be null + }; + + var token_map = Map.new(); + + + //D.print("at 
token map"); + + //lets you do a comparison with null tokens because null token means free + let hash_null_token : ((?Types.TokenSpec) -> Nat, (?Types.TokenSpec, ?Types.TokenSpec) -> Bool) = ( + func(a : ?Types.TokenSpec) : Nat { + switch(a){ + case(null){ + return 0; + }; + case(?val){ + return NFTTypes.token_hash(val); + }; + } + } + , + func(a : ?Types.TokenSpec,b: ?Types.TokenSpec) : Bool { + switch(a,b){ + case(null,null){ + return true; + }; + + case(?val, ?val2){ + return NFTTypes.token_compare(val,val2) == #equal; + }; + case(_){ + return false; + } + }; + + }); + + let compare_null_tokens = func(a : ?Types.TokenSpec,b: ?Types.TokenSpec) : Order.Order { + switch(a,b){ + case(null,null){ + return #equal; + }; + case(null, ?val){ + return #less; + }; + case(?val, null){ + return #greater; + }; + case(?val, ?val2){ + return NFTTypes.token_compare(val,val2); + }; + }; + + }; + + //adds the pricing to the colletion + let addPricing = func(aGroup : Types.Group){ + //D.print("adding Pricing" # debug_show(aGroup)); + + for(thisPricing in aGroup.pricing.vals()){ + //D.print("looking at pricing"); + let thisPricingToken : ?Types.TokenSpec = switch(thisPricing){ + case(#free){null}; + case(#cost_per(data)){?data.token}; + }; + switch(Map.get(token_map, hash_null_token, thisPricingToken)){ + case(null){ + //we don't have this pricing yet + //D.print("not in map"); + let this_tracker = { + var max_allowed : ?Nat = aGroup.allowed_amount; + var prices = Map.new(); + }; + switch(thisPricing){ + case(#free){ + Map.set(this_tracker.prices, Map.nhash, 0, aGroup.allowed_amount); + }; + case(#cost_per(detail)){ + Map.set(this_tracker.prices, Map.nhash, detail.amount, aGroup.allowed_amount); + } + }; + Map.set(token_map, hash_null_token, thisPricingToken, this_tracker); + }; + case(?existing_map){ + //D.print("exists"); + let existing_allowed_amount = existing_map.max_allowed; + switch(existing_allowed_amount, aGroup.allowed_amount){ + case(null, null){ + existing_map.max_allowed := null; + switch(thisPricing){ + case(#free){ + Map.set(existing_map.prices, Map.nhash, 0, null); + }; + case(#cost_per(detail)){ + Map.set(existing_map.prices, Map.nhash, detail.amount, null); + }; + }; + }; + case(?new, null){ + existing_map.max_allowed := null; + switch(thisPricing){ + case(#free){ + Map.set(existing_map.prices, Map.nhash, 0, null); + }; + case(#cost_per(detail)){ + Map.set(existing_map.prices, Map.nhash, detail.amount, null); + }; + }; + }; + case(null, ?old){ + existing_map.max_allowed := null; + switch(thisPricing){ + case(#free){ + Map.set(existing_map.prices, Map.nhash, 0, null); + }; + case(#cost_per(detail)){ + Map.set(existing_map.prices, Map.nhash, detail.amount, null); + }; + }; + }; + case(?new, ?old){ + let thisPricingAmount = switch(thisPricing){case(#free){0};case(#cost_per(detail)){detail.amount}}; + existing_map.max_allowed := if(aGroup.additive == true){ + switch(Map.get(existing_map.prices, Map.nhash, thisPricingAmount)){ + case(null){ + Map.set(existing_map.prices, Map.nhash, thisPricingAmount, aGroup.allowed_amount); + }; + case(?val){ + //price already exists and additive + Map.set(existing_map.prices, Map.nhash, thisPricingAmount, ?(old + new)); + } + }; + ?(new + old); + } else { + if(new > old){ + Map.set(existing_map.prices, Map.nhash, thisPricingAmount, ?new); + ?new; + } else { + Map.set(existing_map.prices, Map.nhash, thisPricingAmount, ?old); + ?old; + }; + }; + switch(thisPricing){ + case(#free){ + Map.set(existing_map.prices, Map.nhash, 0, + if(aGroup.additive == true){ + 
?(new + old); + } else { + if(new > old){ + ?new; + } else { + ?old; + }; + }); + }; + case(#cost_per(detail)){ + Map.set(existing_map.prices, Map.nhash, detail.amount, if(aGroup.additive == true){ + ?(new + old); + } else { + if(new > old){ + ?new; + } else { + ?old; + }; + }); + }; + }; + + }; + }; + + + }; + }; + }; + }; + + //D.print("chekcing groups" # debug_show(Iter.toArray(user_info.groups.vals()))); + + //todo...may need to sort these so that all the non-additive ones are first + + for(thisGroup in user_info.groups.vals()){ + if(thisGroup.namespace == ""){ + //this is the default group and eveyone gets to participate in it unless there are members + //D.print("found default group"); + if(Map.size(thisGroup.members) == 0){ + + addPricing(thisGroup); + } else { + + if(Option.isSome(Map.get(thisGroup.members, Map.phash, user))){ + //we are a part of this group + addPricing(thisGroup); + }; + + } + } else { + //D.print("found a group"); + addPricing(thisGroup); + } + }; + + + //D.print("returning pricing " # debug_show(token_map)); + + + return { + prices : [(?Types.TokenSpec, ?Nat, [(Nat, ?Nat)])] = Iter.toArray<(?Types.TokenSpec, ?Nat, [(Nat, ?Nat)])>( + Iter.map<(?Types.TokenSpec,tracker), (?Types.TokenSpec, ?Nat, [(Nat, ?Nat)])>( + Map.entries(token_map), + func(item){ + (item.0, + item.1.max_allowed, + Iter.toArray<(Nat,?Nat)>(Map.entries(item.1.prices)))})); + personal_reservations = (user_info.personal_reservations, 0, 0); + group_reservations = (user_info.group_reservations,0, 0); + purchases : [(Text, NFTTypes.TransactionRecord)] = switch(Map.get>(state.user_purchases, Map.phash, user)){ + case(null){[]}; + case(?val){Iter.toArray(Map.entries(val))}; + }; + } + }; + + // deposit an escrow + // allocate a set of nfts for payment + public shared(msg) func allocate_sale_nft_origyn(request: Types.AllocationRequest) : async Result.Result{ + //check to see if the max allocation is hit + //see of the principal had an old allocation, if so, make it available + //search for a random qualifying item, make available = false + + + //make sure that the caller is the principal + if(msg.caller != request.principal and msg.caller != state.owner and Option.make(msg.caller) != state.manager){ + return #err(Types.errors(#unauthorized_access, "allocate_sale_nft_origyn - must be the caller ", ?msg.caller)); + }; + + D.print("in allocate"); + if(request.number_to_allocate == 0){ + return #err(Types.errors(#improper_allocation, "allocate_sale_nft_origyn - cannot allocate 0 items ", ?msg.caller)); + }; + + //clear out expired allocations + D.print("cleaning"); + let clean_result = expire_allocations(); + if(clean_result == false){ + //todo: the queue has gotten too full and we should really clear it out + //do a one shot call to self and return an error + }; + + //there has to e some kind of max allocation here + if(request.number_to_allocate > 50){ //temporary...only allow purchasing 50 at a time + return #err(Types.errors(#improper_allocation, "allocate_sale_nft_origyn - cannot allocate more than items...geez, you greedy gus ", ?msg.caller)); + }; + + //see how many the user can buy and at what price + + let {user_info = user_info; allocation_size = allocation_size} = get_possible_purchases(request.principal, request.token, request.number_to_allocate); + + + //todo: check if they arleady have an allocation...need to put those items back + let current_allocation = switch(Map.get(state.user_allocations, Map.phash, request.principal)){ + case(null){ + //reserve the nfts + let new_allocation = { + 
principal = request.principal; + var token = request.token; + var nfts : [Text]= []; + var expiration = get_time() + state.allocation_expiration; + }; + Map.set(state.user_allocations, Map.phash, request.principal, new_allocation); + new_allocation; + }; + case(?val){ + //release the old nfts + for(this_item in val.nfts.vals()){ + switch(Map.get(state.nft_inventory, Map.thash, this_item)){ + case(null){ + //should be unreachable + }; + case(?nft){ + if(nft.available == false){ + nft.available := true; + nft.allocation := null; + }; + }; + }; + + }; + //set the token to the new requested token + val.token := request.token; //is this the best place to do this? + val; + }; + }; + + //D.print("current_allocation" # debug_show(current_allocation)); + + //Adjust allocation size by existing purchases + let purchases = Map.get>(state.user_purchases, Map.phash, request.principal); + + + let reserved = RBU.RBTree(Text.compare); // token_id, group_id, ?price + //let group_count = RBU.RBTree(Text.compare); + //cycle through resevations and see if the user has some of these reseved + //we need to find the cheapest reservations first + + //todo: in the future we'll want to find the full graph of possibilites and then + //do some randomization...for now keep it simpler + //find some available and allocate them + label searchPersonal for(thisReservation in user_info.personal_reservations.0.vals()){ + if(Iter.size(reserved.entries()) == allocation_size){ + break searchPersonal; + }; + for(this_nft in thisReservation.nfts.vals()){ + if(Iter.size(reserved.entries()) == allocation_size){ + break searchPersonal; + }; + //check to see if it is available + switch(Map.get(state.nft_inventory, Map.thash, this_nft)){ + case(null){ + //should be unreachable + }; + case(?nft){ + if(nft.available == true){ + nft.available := false; + nft.allocation := ?request.principal; + reserved.put(this_nft, null); + if(Iter.size(reserved.entries())== allocation_size){ + break searchPersonal; + }; + }; + }; + }; + }; + }; + + //D.print("personal done" # debug_show(Iter.toArray(reserved.entries()))); + + + label searchGroup for(thisReservation in user_info.group_reservations.0.vals()){ + if(Iter.size(reserved.entries()) == allocation_size){ + break searchGroup; + }; + for(this_nft in thisReservation.nfts.vals()){ + if(Iter.size(reserved.entries()) == allocation_size){ + break searchGroup; + }; + //check to see if it is available + switch(Map.get(state.nft_inventory, Map.thash, this_nft)){ + case(null){ + //should be unreachable + }; + case(?nft){ + if(nft.available == true){ + nft.available := false; + nft.allocation := ?request.principal; + //todo: check the group for pricing + reserved.put(this_nft, null); + + if(Iter.size(reserved.entries()) == allocation_size){ + break searchGroup; + }; + }; + }; + }; + }; + }; + + //D.print("group done" # debug_show(Iter.toArray(reserved.entries()))); + + + //todo: we should check to see if this user actually has that balance(maybe need to request subaccount here?) 
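+        //A possible sketch (not wired in) for the balance check mentioned in the todo above,
+        //reusing the balance_of_nft_origyn call that register_escrow_sale_nft_origyn already makes:
+        /*
+        switch(state.nft_gateway){
+          case(?gateway){
+            let nft_canister : NFTTypes.Service = actor(Principal.toText(gateway));
+            switch(await nft_canister.balance_of_nft_origyn(#principal(request.principal))){
+              case(#ok(balance)){
+                //inspect balance.escrow here before keeping the allocation
+              };
+              case(#err(err)){};
+            };
+          };
+          case(null){};
+        };
+        */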
+ current_allocation.nfts := Iter.toArray(Iter.map<(Text, ?Nat), Text>(reserved.entries(), func(item){item.0})); + state.allocation_queue := Deque.pushBack<(Principal, Int)>(state.allocation_queue, (current_allocation.principal, current_allocation.expiration)); + + if(current_allocation.nfts.size() == 0){ + return #err(Types.errors(#inventory_empty, "allocate_sale_nft_origyn - inventory is empty ", ?msg.caller)); + }; + + return #ok({ + allocation_size = current_allocation.nfts.size(); + token = request.token; + principal = request.principal; + expiration = current_allocation.expiration; + }); + // return #err(Types.errors(#nyi, "allocate_nfts nyi", ?msg.caller)); + }; + + + + // takeas an escrow receipt and attempts the instant transfer of the allocation + // creator will need to set a redeem_at_a_time variable that dictates the number of xcanister calls that can + // happen at once. Should use a batch market transfer function + + public shared(msg) func redeem_allocation_sale_nft_origyn(request: Types.RedeemAllocationRequest) : async Result.Result{ + + D.print("in redeem="); + // ToDo: + // Need to validate the allocation has not expired + // redeem_allocation is the actual sale, once that function has validated the allocation has not expired it should call market_transfer_nft_origyn on the nft canister with #instant and provide the escrow receipt. + + let found_allocation = switch(Map.get(state.user_allocations, Map.phash, msg.caller)){ + case(null){ + return #err(Types.errors(#allocation_does_not_exist, "redeem_allocation_sale_nft_origyn - cant find allocation for " # debug_show(msg.caller), ?msg.caller)); + }; + case(?found){ + if(found.expiration < get_time()){ + return #err(Types.errors(#allocation_does_not_exist, "redeem_allocation_sale_nft_origyn - expired allocation for " # debug_show(msg.caller), ?msg.caller)); + }; + if(found.nfts.size() == 0){ + return #err(Types.errors(#allocation_does_not_exist, "redeem_allocation_sale_nft_origyn - found allocation but it was empty " # debug_show(msg.caller), ?msg.caller)); + + }; + found; + }; + }; + + D.print("found_allocation" # debug_show(found_allocation)); + + //validate the escrow + let nft_gateway : NFTTypes.Service = switch(state.nft_gateway){ + case(null){return #err(Types.errors(#bad_config, "redeem_allocation_sale_nft_origyn - bad gateway config null", ?msg.caller));}; + case(?val){actor(Principal.toText(val));} + }; + D.print("gateway os " # debug_show(state.nft_gateway)); + + //get the allocations and build the transfers by price + var transfers = Buffer.Buffer(found_allocation.nfts.size()); + + D.print("getting user info"); + let user_info = calc_user_purchase_graph(msg.caller, state.nft_group, state.nft_reservation, state.nft_inventory, state.user_purchases); + D.print("getting user info" # debug_show(user_info)); + + //todo: advance the allocation past the number of purchases so we don't over allocate the second time through. 
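+        //Worked example (illustrative only): if the user's price graph for the escrow token is
+        //[(?OGY, ?4, [(30_000, ?2), (20_000, ?2)])], remove_specs keeps that single entry and
+        //flat_price sorts its tiers to [(20_000, ?2), (30_000, ?2)], so the loop further down
+        //spends the escrow balance on the cheapest price tier first.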
+ let remove_specs = Array.filter<(?Types.TokenSpec, ?Nat, [(Nat, ?Nat)])>(user_info.prices, func(item){ + switch(item.0){ + case(null){true};//free + case(?val){NFTTypes.token_eq(val, request.escrow_receipt.token)}; + }; + }); + + let flat_price = do{ + let result = Buffer.Buffer<(Nat,?Nat)>(1); + for(this_item in remove_specs.vals()){ + for(thisDetail in this_item.2.vals()){ + result.add(thisDetail); + }; + }; + + Array.sort<(Nat,?Nat)>(result.toArray(), func(a : (Nat, ?Nat),b:(Nat, ?Nat)) : Order.Order{ return Nat.compare(a.0,b.0)}); + }; + + D.print("have flat price" # debug_show(flat_price)); + + //prices: [(?Types.TokenSpec, ?Nat, [(Nat, ?Nat)])]; + + var balance_remaining = request.escrow_receipt.amount; + D.print("balance_remaining" # debug_show(balance_remaining)); + let bought_list = Buffer.Buffer<(Text,Nat)>(1); + var available_nfts = List.fromArray(found_allocation.nfts); + for(this_item in flat_price.vals()){ + D.print("testing " # debug_show(this_item)); + let this_price = this_item.0; + D.print("have price" # debug_show(this_price)); + let this_number = switch(this_item.1){ + case(null){ //this means the user has reached a price where they can allocate up to as many as they want + D.print("unlimited allocation at " # debug_show((this_price, bought_list.size(), available_nfts))); + var tracker = 0; + label builder while(balance_remaining >= this_price and bought_list.size() < found_allocation.nfts.size()){ + let anNFT = List.pop(available_nfts); + available_nfts := anNFT.1; + switch(anNFT.0){ + case(null){break builder}; + case(?anNFT){ + bought_list.add(anNFT, this_price); + balance_remaining -= this_price; + }; + }; + + if(tracker > 1000){break builder}; + tracker += 1; + }; + }; + case(?val){ + //there are a set number at this price...try to fill until you get to the end + //D.print("have a set number " # debug_show(val)); + label builder for(this_item in Iter.range(1, val)){ + //D.print("running iter" # debug_show(this_item, balance_remaining, this_price, bought_list.size(), found_allocation)); + if(balance_remaining < this_price or bought_list.size() >= found_allocation.nfts.size()){ + D.print("breaking builder 1"); + break builder; + }; + + //D.print("poping builder"); + + let anNFT = List.pop(available_nfts); + available_nfts := anNFT.1; + switch(anNFT.0){ + case(null){ + D.print("breaking builde 2r"); + break builder}; + case(?anNFT){ + bought_list.add(anNFT, this_price); + balance_remaining -= this_price; + D.print("balance_remaining loop" # debug_show(balance_remaining)); + }; + }; + + + }; + }; + } + }; + + if(bought_list.size() == 0){ + D.print("nothing int he list"); + return #err(Types.errors(#improper_escrow, "redeem_allocation_sale_nft_origyn - improper_escrow - not large enough for one purchase " # debug_show(request.escrow_receipt), ?msg.caller)); + }; + + for(this_item in bought_list.vals()){ + transfers.add({ + token_id = this_item.0; + sales_config = { + escrow_receipt = ?{ + amount = this_item.1; + seller = request.escrow_receipt.seller; + buyer = request.escrow_receipt.buyer; + token_id = request.escrow_receipt.token_id; + token = request.escrow_receipt.token; + }; + broker_id = null; + pricing = #instant; + }; + }) + }; + + //try the purchase + D.print("about to send transfer" # debug_show(transfers.toArray())); + let transfer_result = await nft_gateway.market_transfer_batch_nft_origyn(transfers.toArray()); + D.print("result was" # debug_show(transfer_result)); + + //process the results + + let results = Buffer.Buffer<{token_id: Text; transaction: 
Result.Result}>(1); + var tracker = 0; + D.print("the inventory " # debug_show(Iter.toArray(Map.entries(state.nft_inventory)))); + for(thisResponse in transfer_result.vals()){ + switch(thisResponse){ + case(#ok(trx)){ + let token_id = trx.token_id; + D.print("the inventory " # debug_show(Iter.toArray(Map.entries(state.nft_inventory)))); + let inventory = switch(Map.get(state.nft_inventory, Map.thash, token_id)){ + case(null){ + results.add({ + token_id = token_id; + transaction = #err(Types.errors(#bad_canister_trx, "redeem_allocation_sale_nft_origyn - a transaction was returned for an item that is not in inventory " # debug_show(trx), ?msg.caller)); + }); + }; + case(?item){ + found_allocation.nfts := Array.filter(found_allocation.nfts, func(anitem: Text){ anitem != token_id}); + item.sale_block := ?trx.index; + item.allocation := null; + results.add({ + token_id = token_id; + transaction = #ok(trx); + }); + }; + }; + }; + case(#err(err)){ + + results.add({ + token_id = bought_list.get(tracker).0; + transaction = #err(Types.errors(#bad_canister_trx, "redeem_allocation_sale_nft_origyn - tranasaction returned an error " # debug_show(bought_list.get(tracker).0, err), ?msg.caller)); + }); + + } + }; + tracker += 1; + }; + + return #ok({ + nfts = results.toArray(); + }); + + // return #err(Types.errors(#nyi, "redeem_allocation nyi", ?msg.caller)); + }; + + private func get_possible_purchases(caller : Principal, token : ?NFTTypes.TokenSpec, number_to_allocate: Nat) : {user_info: + { + prices: [(?Types.TokenSpec, ?Nat, [(Nat, ?Nat)])]; //token, max_allowed, (amount, number) + personal_reservations: ([Types.Reservation], Nat, Nat); + group_reservations: ([Types.Reservation], Nat, Nat); + purchases: [(Text, NFTTypes.TransactionRecord)]; + }; allocation_size: Nat}{ + + //check to see if the user has any groups and reservations. + //D.print("getting user info"); + let user_info = calc_user_purchase_graph(caller, state.nft_group, state.nft_reservation, state.nft_inventory, state.user_purchases); + //D.print("getting user info" # debug_show(user_info)); + + + + + var highest_found : ?Nat = ?0; + label search for(thisPricing in user_info.prices.vals()){ + switch(thisPricing.0, token){ + case(null, null){ + //free item can pass + }; + case(?thisPricing, null){ + //user has requested only free items, skip + continue search; + + }; + case(?thisPricing, ?requestedPricing){ + if(NFTTypes.token_eq(thisPricing, requestedPricing) == false){ + continue search; + }; + }; + case(null, ?requestedPricing){ + //free item can pass + }; + }; + + switch(thisPricing.1, highest_found){ + case(null, ?current){ + highest_found := null; + break search; + }; + case(?val, ?current){ + if(val > current){ + highest_found := ?val + }; + }; + case(_,_){ + //should be unreachable + }; + }; + }; + //D.print("highest found" # debug_show(highest_found)); + + let allocation_size = switch(highest_found){ + case(null){number_to_allocate}; + case(?val){ + if(val > number_to_allocate){ + number_to_allocate; + } else { + val; + }}; + }; + + return {user_info = user_info; allocation_size = allocation_size}; + }; + + //deposit + //now if the mint is delayed we'll need to talk about what happens then. 
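+    //Example (illustrative only) of how a client canister might register. The sale_canister_id
+    //and buyer values are hypothetical placeholders; a null escrow_receipt registers for free
+    //items only, while a populated receipt is validated against the gateway's escrow balance.
+    /*
+    let sale : Types.Service = actor(sale_canister_id);
+    let registration = await sale.register_escrow_sale_nft_origyn({
+      principal = buyer;
+      max_desired = 2;
+      escrow_receipt = null;
+    });
+    */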
+ public shared(msg) func register_escrow_sale_nft_origyn(request: Types.RegisterEscrowRequest) : async Result.Result{ + + D.print("In register escrow " # debug_show(request)); + //check the max requested has a positive amount + if(request.max_desired == 0){ + return #err(Types.errors(#improper_escrow, "register_escrow_sale_nft_origyn - max_requested must be greater than 0 " # debug_show(request), ?msg.caller)); + }; + + + switch(request.escrow_receipt){ + case(null){}; + case(?val){ + //validate the escrow + if(val.amount == 0){ + return #err(Types.errors(#improper_escrow, "register_escrow_sale_nft_origyn - amount must be greater than 0 " # debug_show(request), ?msg.caller)); + }; + + if(NFTTypes.account_eq(val.buyer, #principal(msg.caller))){//todo: allow manager + return #err(Types.errors(#improper_escrow, "register_escrow_sale_nft_origyn - buyer must be sender " # debug_show(request), ?msg.caller)); + }; + + //validate the token used in in the pricing + }; + }; + + D.print("script reciept validated " # debug_show(true)); + + + let {user_info = user_info; allocation_size = allocation_size} = get_possible_purchases(msg.caller, switch(request.escrow_receipt){case(null){null;};case(?val){?val.token;}}, request.max_desired); + + D.print("have usr info " # debug_show(user_info, allocation_size)); + + + if(allocation_size ==0){ + return #err(Types.errors(#improper_allocation, "register_escrow_sale_nft_origyn - no valid allocation found " # debug_show(request), ?msg.caller)); + }; + + let current_reg = switch(request.escrow_receipt){ + case(null){ + //only put in if the user qualifed for some free items + D.print("handling fee items " # debug_show(true)); + //add the registrations + switch(Map.get(state.user_registrations, Map.phash, request.principal)){ + case(null){ + let new_reg = { + principal = request.principal; + var max_desired= request.max_desired; + var escrow_receipt = request.escrow_receipt; + var allocation_size = allocation_size; + var allocation = Map.new(); + }; + Map.set(state.user_registrations, Map.phash, request.principal, new_reg ); + new_reg; + }; + case(?val){ + //this already exists + val.max_desired := request.max_desired; + val.escrow_receipt := request.escrow_receipt; + val.allocation_size := allocation_size; + val; + } + }; + }; + case(?val){ + //check that the escrow is valid + + D.print("found items " # debug_show(val)); + + let nft_canister : NFTTypes.Service = switch(state.nft_gateway){ + case(null){return #err(Types.errors(#bad_config, "register_escrow_sale_nft_origyn - no gateway ", ?msg.caller));}; + case(?val){actor(Principal.toText(val))}; + }; + + //are there enough free items to cover the amount allocated? 
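+        //The search below only accepts an escrow record as a match when it is not attached to a
+        //sale (sale_id == null), is locked until the configured required_lock_date, covers at
+        //least the receipt amount, and pairs the same buyer, seller, token and token_id as the
+        //request.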
+ + let balance = switch(await nft_canister.balance_of_nft_origyn(#principal(msg.caller))){ + case(#err(err)){return #err(Types.errors(#improper_escrow, "register_escrow_sale_nft_origyn - error checking balance " # debug_show(request, err), ?msg.caller))}; + case(#ok(val)){val}; + }; + + D.print("have balance " # debug_show(balance)); + + var found : Bool = false; + label search for(this_item in balance.escrow.vals()){ + if( + val.seller == this_item.seller and + val.buyer == this_item.buyer and + + val.token_id == this_item.token_id and + null == this_item.sale_id and + this_item.lock_to_date == state.required_lock_date and + val.amount <= this_item.amount and + NFTTypes.token_eq(val.token, this_item.token) + ){ + found :=true; + break search; + }; + }; + + if(found == false){ + return #err(Types.errors(#improper_escrow, "register_escrow_sale_nft_origyn - cannot find escrow " # debug_show(request), ?msg.caller)); + }; + + //add the registrations + switch(Map.get(state.user_registrations, Map.phash, request.principal)){ + case(null){ + let new_reg = { + principal = request.principal; + var max_desired= request.max_desired; + var escrow_receipt = request.escrow_receipt; + var allocation_size = allocation_size; + var allocation = Map.new(); + }; + Map.set(state.user_registrations, Map.phash, request.principal, new_reg ); + new_reg; + }; + case(?val){ + //this already exists + val.max_desired := request.max_desired; + val.escrow_receipt := request.escrow_receipt; + val.allocation_size := allocation_size; + val; + } + }; + + }; + }; + + + + D.print("about to iter"); + + let iter1 = Map.entries(current_reg.allocation); + let iter2 = Iter.map<(Text, Types.RegistrationClaim), Types.RegisterEscrowAllocationDetail>(iter1, Types.stabalize_xfer_RegisterAllocation); + return (#ok({ + + allocation = Iter.toArray(iter2); + max_desired = current_reg.max_desired; + escrow_receipt = request.escrow_receipt; + allocation_size = current_reg.allocation_size; + principal = current_reg.principal; + })); + + // return #err(Types.errors(#nyi, "register_escrow nyi", ?msg.caller)); + }; + + public shared(msg) func execute_claim_sale_nft_origyn(token_id : Text) : async Result.Result{ + + return #err(Types.errors(#nyi, "not implemented", ?msg.caller)); + }; + + // Helper functions + + public query(msg) func get_total_inventory_tree() : async Result.Result<[Types.NFTInventoryItemDetail], Types.OrigynError>{ + let iter1 = Map.entries(state.nft_inventory); + let iter2 = Iter.map<(Text, Types.NFTInventoryItem), Types.NFTInventoryItemDetail>(iter1, Types.stabalize_xfer_NFTInventoryItem); + return #ok(Iter.toArray(iter2)); + }; + + // Add to inventory + public shared(msg) func add_inventory_item(request: Types.NFTInventoryItemRequest) : async Result.Result{ + + + if(msg.caller != state.owner){ + return #err(Types.errors(#unauthorized_access, "add_inventory_item only owner can manage sale canister", ?msg.caller)) + }; + + + + Map.set(state.nft_inventory, Map.thash, request.token_id, { + canister = request.canister; + token_id = request.token_id; + var available = true; + var sale_block = null; + var allocation = null; + var reservations = Map.new(); // + }); + // //D.print("nft_inventory.put : " # "\n" # + // "result :" # debug_show(result) + // ); + return #ok("success"); + }; + // Get inventory + public query(msg) func get_inventory_item_sale_nft_origyn(key: Text) : async Result.Result { + + // ToDo: Need to find the right type to return #ok(item) + let item = Map.get(state.nft_inventory, Map.thash, key); + switch(item){ + 
case(?val){ + return #ok(Types.stabalize_xfer_NFTInventoryItem((val.token_id, val))); + }; + case(null){ + return #err(Types.errors(#inventory_item_does_not_exists, "get_inventory_item_sale_nft_origyn - cant find token_id in inventory", ?msg.caller)); + }; + }; + + }; + + // Get inventory size + public query func get_inventory_size_sale_nft_origyn() : async Result.Result { + + let s = Map.size(state.nft_inventory); + + return #ok(s); + }; + + //get inventory + public query func get_inventory_sale_nft_origyn(start: ?Nat, size: ?Nat) : async Result.Result{ + + var size_requested = switch(size){case(null){Map.size(state.nft_inventory)}; case(?val){val}}; + if(size_requested > 10000){ + size_requested := 10000 + }; + let results = Buffer.Buffer(size_requested); + + var this_start = switch(start){ + case(null){0}; + case(?val){val}; + }; + + var tracker : Nat = 0; + label search for(this_item in Map.vals(state.nft_inventory)){ + + if(tracker >= this_start){ + results.add(Types.stabalize_xfer_NFTInventoryItem((this_item.token_id, this_item))); + }; + tracker += 1; + if(results.size() >= size_requested){break search}; + }; + + return #ok({ + total_size = Map.size(state.nft_inventory); + items = results.toArray(); + start = this_start; + }); + }; + + // Groups + + // public shared(msg) func add_group_item(request: Types.AddGroupRequest) : async Result.Result{ + + + // nft_group := Map.set(nft_group ,Text.compare, request.key, request.item); + // // //D.print("nft_inventory.put : " # "\n" # + // // "result :" # debug_show(result) + // // ); + // return #ok("success"); + // }; + + // // Get group size + public query func get_group_size() : async Result.Result { + + let s = state.nft_group_size; + + return #ok(s); + }; + + + // Reservations + public query(msg) func get_total_reservations_tree() : async Result.Result<[(Text, Types.Reservation)], Types.OrigynError>{ + + return #ok(Iter.toArray(Map.entries(state.nft_reservation))); + }; + + public shared (msg) func __advance_time(new_time: Int) : async Int { + + if(msg.caller != state.owner){ + throw Error.reject("not owner"); + }; + __test_time := new_time; + return __test_time; + + }; + + public shared (msg) func __set_time_mode(newMode: {#test; #standard;}) : async Bool { + if(msg.caller != state.owner){ + throw Error.reject("not owner"); + }; + __time_mode := newMode; + return true; + }; + + private func expire_allocations (): Bool{ + var tracker = 0; + label clean while(1==1){ + switch(Deque.peekFront<(Principal,Int)>(state.allocation_queue)){ + case(null){}; + case(?val){ + //D.print("found item at the front" # debug_show(val.1, get_time())); + if(val.1 < get_time()){ + //D.print("cleaning"); + + let result = Deque.popFront<(Principal,Int)>(state.allocation_queue); + switch(result){ + case(null){};//unreachable + case(?existing){ + state.allocation_queue := existing.1; + switch(Map.get(state.user_allocations, Map.phash, existing.0.0)){ + case(null){}; //already cleared + case(?found){ + //release the old nfts + for(this_item in found.nfts.vals()){ + switch(Map.get(state.nft_inventory, Map.thash, this_item)){ + case(null){ + //should be unreachable + }; + case(?nft){ + //D.print("returning to pool " # this_item); + if(nft.available == false){ + nft.available := true; + nft.allocation := null; + }; + }; + }; + + }; + }; + }; + }; + }; + } else { + return true; + }; + }; + }; + if(tracker > 10000){ + return false; //returing false shoul lead to a one shot call to self + }; + tracker +=1; + }; + return true; + }; + + + + //query allocation + + public 
query (msg) func get_allocation_sale_nft_origyn(principal: Principal) : async Result.Result{ + + //todo: Secure so only msg.caller or owner/manager can call this + + switch( Map.get(state.user_allocations, Map.phash, principal)){ + case(?val){ + if(get_time() > val.expiration){ + //item is expired...pretend it doesn't exist + return #err(Types.errors(#allocation_does_not_exist, "get_allocation_sale_nft_origyn - cant find allocation expired", ?msg.caller)); + }; + if(val.nfts.size() == 0){ + return #err(Types.errors(#allocation_does_not_exist, "get_allocation_sale_nft_origyn - zero items allocated", ?msg.caller)); + }; + return (#ok({ + allocation_size = val.nfts.size(); + token = val.token; + principal = val.principal; + expiration = val.expiration; + })) + }; + case(null){ + return #err(Types.errors(#allocation_does_not_exist, "get_allocation_sale_nft_origyn - cant find principal in allocations", ?msg.caller)); + }; + }; + + }; + + //query groups + //query reservations + //query registrations + + public query (msg) func get_registration_sale_nft_origyn(principal : Principal) : async Result.Result{ + + //todo: Secure so only msg.caller or owner/manager can call this + D.print("geting reg balance" # debug_show(principal)); + + switch( Map.get(state.user_registrations, Map.phash, principal)){ + case(?val){ + + let iter1 = Map.entries(val.allocation); + let iter2 = Iter.map<(Text, Types.RegistrationClaim), Types.RegisterEscrowAllocationDetail>(iter1, Types.stabalize_xfer_RegisterAllocation); + return (#ok({ + + allocation = Iter.toArray(iter2); + max_desired = val.max_desired; + escrow_receipt = val.escrow_receipt; + allocation_size = val.allocation_size; + principal = val.principal; + })); + }; + case(null){ + return (#ok({ + + allocation = []; + max_desired = 0; + escrow_receipt = null; + allocation_size = 0; + principal = principal; + })); + }; + }; + + } + + + + + +} \ No newline at end of file diff --git a/src/origyn_sale_reference/types.mo b/src/origyn_sale_reference/types.mo new file mode 100644 index 0000000..941ee35 --- /dev/null +++ b/src/origyn_sale_reference/types.mo @@ -0,0 +1,634 @@ +import AccountIdentifier "mo:principalmo/AccountIdentifier"; +import Buffer "mo:base/Buffer"; +import CandyTypes "mo:candy_0_1_10/types"; +import EXT "mo:ext/Core"; +import Iter "mo:base/Iter"; +import Deque "mo:base/Deque"; +import NFTTypes "../origyn_nft_reference/types"; +import Principal "mo:base/Principal"; +import Map "mo:map_6_0_0/Map"; +import Result "mo:base/Result"; +import SB "mo:stablebuffer_0_2_0/StableBuffer"; +import StableBuffer "mo:base/Buffer"; +// import DIP20_Interface "dip20_interface"; +// import DIP20_Interface "../origyn_nft_reference/dip20_interface" +module { + + public type OrigynError = {number : Nat32; text: Text; error: Errors; flag_point: Text;}; + + public type InitArgs = { + owner: Principal; //owner of the canister + allocation_expiration: Int; //amount of time to keep an allocation for 900000000000 = 15 minutes + nft_gateway: ?Principal; //the nft gateway canister this sales canister will sell NFTs for + sale_open_date : ?Int; //date that the NFTs in the registration shold be minted/allocated + registration_date: ?Int; //date that registations open up + end_date: ?Int; //date that the canister closes its sale + required_lock_date: ?Int //date that users must lock their tokens until to qualify for reservations + }; + + public type ManageCommand = { + #UpdateOwner : Principal; + #UpdateAllocationExpiration : Int; + #UpdateNFTGateway: ?Principal; + 
#UpdateSaleOpenDate: ?Int; + #UpdateRegistrationDate: ?Int; + #UpdateEndDate: ?Int; + #UpdateLockDate: ?Int; + }; + + public type NFTInventoryItem = { + canister: Principal; // principal that the nft is on + token_id: Text; // unique namespace of the item + var available: Bool; // if the item is available + var sale_block: ?Nat; // transaction id used to sell the item + var allocation : ?Principal; + var reservations : Map.Map; + + }; + + public type NFTInventoryItemDetail = { + canister: Principal; // principal that the nft is on + token_id: Text; // unique namespace of the item + available: Bool; // if the item is available + sale_block: ?Nat; // transaction id used to sell the item + allocation : ?Principal; + reservations : [(Text,Int)]; + }; + + public func stabalize_xfer_NFTInventoryItem(item : (Text, NFTInventoryItem)) : NFTInventoryItemDetail { + { + canister = item.1.canister; + token_id = item.1.token_id; + available = item.1.available; + sale_block = item.1.sale_block; + allocation = item.1.allocation; + reservations = Iter.toArray<(Text,Int)>(Map.entries(item.1.reservations)); + } + }; + + public type NFTInventoryItemRequest = { + canister: Principal; // principal that the nft is on + token_id: Text; // unique namespace of the item + }; + + // Is Text our best option for the key? + public type NFTInventory = Map.Map; + + + + public type GetInventoryItemResponse = NFTInventoryItem; + + public type GetInventoryResponse = { + total_size : Nat; + items : [NFTInventoryItemDetail]; + start : Nat; + }; + + public type Allocation = { + principal: Principal; + var token: ?TokenSpec; + var nfts: [Text]; + var expiration: Int; + }; + + public type Allocations = Map.Map; + + + + // public type ReservationStable = { + // namespace: Text; + // reservation_type : { + // #Groups : [Text] ; + // #Principal : Principal; + // }; + // exclusive: Bool; + // nfts: [NFTInventoryItem]; + // }; + + + + public type Purchases = Map.Map>; + + + + // Which is the right group?? 
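+    //Groups are keyed by their Text namespace. The empty namespace ("") is treated by main.mo as
+    //the default group that every principal may participate in, unless explicit members have
+    //been added to it.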
+ public type Groups = Map.Map; + + public type Group = { + namespace: Text; + var members: Map.Map; // + var redemptions: Map.Map; // + var pricing: Pricing; + var allowed_amount: ?AllowedAmount; + var additive: Bool; + var tier: Nat; + }; + + public type GroupStable = { + namespace: Text; + members: [(Principal, Int)]; // + redemptions: [(Principal, Nat)]; // + pricing: Pricing; + allowed_amount: ?AllowedAmount; + additive: Bool; + tier: Nat; + }; + + public func group_stabalize(item : Group) : GroupStable { + + return { + namespace = item.namespace; + members = Iter.toArray(Map.entries(item.members)); // + redemptions = Iter.toArray(Map.entries(item.redemptions));// + pricing = item.pricing; + allowed_amount = item.allowed_amount; + additive = item.additive; + tier = item.tier; + }; + }; + + + + + + public type AddGroupRequest = { + key: Text; + item: { + #add: { + namespace: Text; + members: [Principal]; + pricing: ?Pricing; + allowed_amount: ?AllowedAmount; + tier: Nat; + additive: Bool; + }; + }; + }; + // public type GroupsStable = [(Text,GroupStable)]; + + // public type GroupStable = { + // namespace: Text; + // members: [Principal]; + // redemptions:[(Principal, Nat)]; + // pricing: ?Pricing; + // allowed_amount: ?AllowedAmount; + // }; + public type GetGroupResponse = [{ + namespace: Text; + pricing: ?Pricing; + allowed_amount: ?AllowedAmount; + }]; + public type GetEscrowResponse = { + receipt: NFTTypes.EscrowReceipt; + balance: Nat; + transaction: NFTTypes.TransactionRecord; + }; + + public type TokenSpec = { + #ic: ICTokenSpec; + #extensible : CandyTypes.CandyValue; //#Class + }; + + public type ICTokenSpec = { + canister: Principal; + fee: Nat; + symbol: Text; + decimals: Nat; + standard: { + #DIP20; + #Ledger; + #EXTFungible; + }; + }; + + // ToDo: Need to add opt : #cost_per & #free - I keep having an error when add those options + public type Pricing = [{ + #cost_per: { + amount: Nat; + token: TokenSpec; + }; + #free + }]; + + public type State = { + var owner : Principal; + var manager : ?Principal; + var nft_inventory : NFTInventory; + var nft_group : Groups; + var nft_group_size : Nat; + var nft_reservation : Reservations; + var nft_reservation_size : Nat; + var user_allocations : Allocations; + var user_registrations : Registrations; + var user_purchases: Purchases; + var allocation_expiration : Int; + var nft_gateway : ?Principal; + var sale_open_date : ?Int; + var registration_date : ?Int; + var end_date : ?Int; + var required_lock_date : ?Int; + var allocation_queue : Deque.Deque<(Principal, Int)>; + }; + + public type SaleMetrics = { + owner : Principal; + allocation_expiration : Int; + nft_gateway : ?Principal; + sale_open_date : ?Int; + registration_date : ?Int; + end_date : ?Int; + //feel free to add liberally + }; + + public type AllowedAmount = Nat; + + public type ManageNFTRequest = { + #add: NFTInventoryItemRequest; + #remove: Text; //token_id should be unique + }; + + public type ManageNFTItemResponse = { + #add: Text; + #remove: Text; + #err: (Text, OrigynError); + }; + + public type ManageNFTResponse = { + total_size: Nat; + items: [ManageNFTItemResponse]; + }; + + public type ManageGroupRequest = [{ + #update: { + namespace: Text; + members: ?[Principal]; + pricing: ?Pricing; + allowed_amount: ?AllowedAmount; + tier: Nat; + additive: Bool; + }; + #remove: { + namespace: Text; + }; + #addMembers: { + namespace: Text; + members: [Principal]; + }; + #removeMembers: { + namespace: Text; + members: [Principal]; + }; + }]; + + public type ManageGroupResult = 
{ + #update: Result.Result; + #remove: Result.Result;//namespace removed + #addMembers: Result.Result<(Nat, Nat), OrigynError>;//number added, number total + #removeMembers: Result.Result<(Nat,Nat), OrigynError>;//number added, number total + #err: OrigynError; + }; + + public type ManageGroupResponse = [ManageGroupResult]; + + public type Reservations = Map.Map; + + public type Reservation = { + namespace: Text; + reservation_type : ReservationType; + exclusive: Bool; //this means that these nfts only can be in this reservation + nfts: [Text]; + }; + public type ReservationType = { + #Groups : [Text]; + #Principal : Principal; + }; + + public type ManageReservationRequest = { + #add: { + namespace: Text; + reservation_type : { + #Groups : [Text] ; + #Principal : Principal; + }; + exclusive: Bool; + nfts: [Text]; + }; + #remove: { + namespace: Text; + }; + #addNFTs: { + namespace: Text; + nfts: [Text]; + }; + #removeNFTs: { + namespace: Text; + nfts: [Text]; + }; + #update_type: { + namespace: Text; + reservation_type : { + #Groups : [Text] ; + #Principal : Principal; + }; + }; + }; + + public type ManageReservationItemResponse = { + #add: Text; + #remove: Text; + #addNFTs: Nat; + #removeNFTs: Nat; + #update_type : Text; + #err: (Text, OrigynError); + }; + + public type ManageReservationResponse = { + total_size: Nat; + items: [ManageReservationItemResponse]; + }; + + public type AllocationRequest = { + principal : Principal; + number_to_allocate: Nat; //creator can set a max + token: ?TokenSpec; //null if only claiming free items + }; + + public type AllocationResponse = { + allocation_size: Nat; + token: ?TokenSpec; + principal: Principal; + expiration: Int; + }; + + public type RedeemAllocationRequest = { + escrow_receipt: NFTTypes.EscrowReceipt; //creator can set a max + }; + + public type RedeemAllocationResponse = { + nfts: [{token_id:Text; transaction: Result.Result< NFTTypes.TransactionRecord, OrigynError>}]; + }; + + //users can only have one registration so we want to be careful about overwriting + //data about allocations. 
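+  //The allocation map below is keyed by token_id and holds one RegistrationClaim per reserved
+  //NFT; re-registering the same principal only updates max_desired, escrow_receipt and
+  //allocation_size (see register_escrow_sale_nft_origyn in main.mo).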
+ public type Registration = { + principal: Principal; + var max_desired: Nat; + var escrow_receipt: ?NFTTypes.EscrowReceipt; + var allocation_size : Nat; + var allocation: Map.Map; + }; + + public type RegistrationClaim = { + var claimed : Bool; + var trx : ?NFTTypes.TransactionRecord; + }; + + public type Registrations = Map.Map; + + public type RegisterEscrowRequest = { + principal: Principal; + max_desired: Nat; + escrow_receipt: ?NFTTypes.EscrowReceipt; //creator can set a max + }; + + public type RegisterEscrowAllocationDetail = { + token_id: Text; + claimed: Bool; + trx: ?NFTTypes.TransactionRecord; + }; + + + public func stabalize_xfer_RegisterAllocation(item: (Text, RegistrationClaim)) : RegisterEscrowAllocationDetail{ + return { + token_id = item.0; + claimed = item.1.claimed; + trx = item.1.trx; + }}; + + public type RegisterEscrowResponse = { + max_desired: Nat; + principal: Principal; + escrow_receipt: ?NFTTypes.EscrowReceipt; //creator can set a max + allocation : [RegisterEscrowAllocationDetail]; + allocation_size: Nat; + }; + + public type TestRequest = { + account_id: NFTTypes.Account; + standard: { + #DIP20; + #Ledger; + #EXTFungible; + }; + }; + + public type Errors = { + #bad_date; + #bad_canister_trx; + #reservation_item_exists; + #reservation_item_does_not_exists; + #group_item_exists; + #group_item_does_not_exists; + #inventory_item_exists; + #inventory_item_does_not_exists; + #improper_allocation; + #improper_escrow; + #improper_lock; + #inventory_empty; + #registartion_not_open; + #allocation_does_not_exist; + #bad_config; + #nyi; + #ijn; + #nti; + #unauthorized_access + }; + + public func errors(the_error : Errors, flag_point: Text, caller: ?Principal) : OrigynError { + + + switch(the_error){ + case(#bad_date){ + return { + number = 16; + text = "bad date"; + error = the_error; + flag_point = flag_point; + caller = caller} + }; + case(#bad_config){ + return { + number = 32; + text = "bad config"; + error = the_error; + flag_point = flag_point; + caller = caller} + }; + + case(#bad_canister_trx){ + return { + number = 64; + text = "bad canister trx"; + error = the_error; + flag_point = flag_point; + caller = caller} + }; + + + + case(#unauthorized_access){ + return { + number = 2000; + text = "unauthorized access"; + error = the_error; + flag_point = flag_point; + caller = caller} + }; + + //inventory 4000s + case(#inventory_item_exists){ + return { + number = 4000; + text = "inventory item exists"; + error = the_error; + flag_point = flag_point; + caller = caller} + }; + case(#inventory_item_does_not_exists){ + return { + number = 4001; + text = "inventory item does not exists"; + error = the_error; + flag_point = flag_point; + caller = caller} + }; + + case(#group_item_exists){ + return { + number = 4002; + text = "group item exists"; + error = the_error; + flag_point = flag_point; + caller = caller} + }; + case(#group_item_does_not_exists){ + return { + number = 4003; + text = "group item does not exists"; + error = the_error; + flag_point = flag_point; + caller = caller} + }; + case(#reservation_item_exists){ + return { + number = 4004; + text = "reservation item exists"; + error = the_error; + flag_point = flag_point; + caller = caller} + }; + case(#reservation_item_does_not_exists){ + return { + number = 4005; + text = "reservation item does not exists"; + error = the_error; + flag_point = flag_point; + caller = caller} + }; + + //allocations 5000 + case(#improper_allocation){ + return { + number = 5000; + text = "improper allocation"; + error = the_error; + 
flag_point = flag_point; + caller = caller} + }; + case(#allocation_does_not_exist){ + return { + number = 5001; + text = "allocation does not exist"; + error = the_error; + flag_point = flag_point; + caller = caller} + }; + case(#improper_lock){ + return { + number = 5002; + text = "improper escrow lock"; + error = the_error; + flag_point = flag_point; + caller = caller} + }; + case(#improper_escrow){ + return { + number = 5003; + text = "ecrow not valid"; + error = the_error; + flag_point = flag_point; + caller = caller} + }; + case(#inventory_empty){ + return { + number = 5004; + text = "inventory empty"; + error = the_error; + flag_point = flag_point; + caller = caller} + }; + case(#registartion_not_open){ + return { + number = 5005; + text = "registration not open"; + error = the_error; + flag_point = flag_point; + caller = caller} + }; + + + + + // + case(#nyi){ + return { + number = 1999; + text = "not yet implemented"; + error = the_error; + flag_point = flag_point; + caller = caller} + }; + case(#ijn){ + return { + number = 001; + text = "implemented just now"; + error = the_error; + flag_point = flag_point; + caller = caller} + }; + case(#nti){ + return { + number = 002; + text = "No token ids"; + error = the_error; + flag_point = flag_point; + caller = caller} + }; + + + + + }; + }; + + + public type Service = actor { + manage_nfts_sale_nft_origyn : ([ManageNFTRequest]) -> async Result.Result; + allocate_sale_nft_origyn: (AllocationRequest) -> async Result.Result; + redeem_allocation_sale_nft_origyn: (RedeemAllocationRequest) -> async Result.Result; + register_escrow_sale_nft_origyn: (RegisterEscrowRequest) -> async Result.Result; + execute_claim_sale_nft_origyn: (Text) -> async Result.Result; + manage_reservation_sale_nft_origyn: ([ManageReservationRequest]) -> async Result.Result; + }; + +} \ No newline at end of file diff --git a/src/tests/all_ref.mo b/src/tests/all_ref.mo new file mode 100644 index 0000000..2c0c154 --- /dev/null +++ b/src/tests/all_ref.mo @@ -0,0 +1,74 @@ + +import C "mo:matchers/Canister"; +import M "mo:matchers/Matchers"; +import T "mo:matchers/Testable"; +import D "mo:base/Debug"; +import Principal "mo:base/Principal"; +import Blob "mo:base/Blob"; +import DFXTypes "../origyn_nft_reference/dfxtypes"; +import SalesCanister "../origyn_sale_reference/main"; +import TestRunner "test_runner"; + + +import CollectionTestCanisterDef "test_runner_collection"; +import DataTestCanisterDef "test_runner_data"; +import InstantTest "test_runner_instant_transfer"; +import NFTTestCanisterDef2 "test_runner_nft_2"; +import NFTTestCanisterDef "test_runner_nft"; +import SaleTestCanisterDef "test_runner_sale"; +import StorageTestCanisterDef "test_runner_storage"; +import UtilTestCanisterDef "test_runner_utils"; + +import Wallet "test_wallet"; + +import CanisterFactoryDef "canister_creator"; +import StorageFactory "storage_creator"; +import AccountIdentifier "mo:principalmo/AccountIdentifier"; + +import Migrations "../origyn_nft_reference/migrations"; +import StorageMigrations "../origyn_nft_reference/migrations_storage"; + + +shared (deployer) actor class test_runner(dfx_ledger: Principal,test_runner_nft: Principal) = this { + + + type test_runner_nft_service = actor { + test: () -> async ({#success; #fail : Text}); + }; + + let it = C.Tester({ batchSize = 8 }); + + + + public shared func test() : async Text { + + //this is annoying, but it is gets around the "not defined bug"; + let NFTTestCanister : test_runner_nft_service = actor(Principal.toText(test_runner_nft)); + + + 
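+        //Casting the principal to the minimal test_runner_nft_service interface lets this runner
+        //drive an already-deployed test canister through its test() entry point without importing
+        //the full actor class definition.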
it.should("run nft tests", func () : async C.TestResult = async { + //send testrunnner some dfx tokens + let dfx : DFXTypes.Service = actor(Principal.toText(dfx_ledger)); + //D.print("about to send to test canister"); + let resultdfx = await dfx.transfer({ + to = Blob.fromArray(AccountIdentifier.addHash(AccountIdentifier.fromPrincipal(Principal.fromActor(NFTTestCanister), null))); + fee = {e8s = 200_000}; + memo = 1; + from_subaccount = null; + created_at_time = null; + amount = {e8s = 200_000_000_000_000};}); + + //D.print(debug_show(resultdfx)); + + let result = await NFTTestCanister.test(); + //D.print("result"); + //D.print(debug_show(result)); + //M.attempt(greeting, M.equals(T.text("Hello, Christoph!"))) + return result; + }); + + + await it.runAll() + // await it.run() + } +} \ No newline at end of file diff --git a/src/tests/canister_creator.mo b/src/tests/canister_creator.mo new file mode 100644 index 0000000..f88981a --- /dev/null +++ b/src/tests/canister_creator.mo @@ -0,0 +1,20 @@ + +import C "mo:matchers/Canister"; +import M "mo:matchers/Matchers"; +import T "mo:matchers/Testable"; +import D "mo:base/Debug"; +import Principal "mo:base/Principal"; +import Blob "mo:base/Blob"; +import DFXTypes "../origyn_nft_reference/dfxtypes"; +import AccountIdentifier "mo:principalmo/AccountIdentifier"; +import NFTCanisterDef "../origyn_nft_reference/main"; + +shared (deployer) actor class canister_creator() = this { + + public shared func create(data: {owner : Principal; storage_space: ?Nat}) : async Principal { + D.print("in create nft"); + let a = await NFTCanisterDef.Nft_Canister(data); + debug { D.print("should have it....returning" # debug_show(data)) }; + return Principal.fromActor(a); + }; +}; \ No newline at end of file diff --git a/src/tests/ledger-canister-min.wasm b/src/tests/ledger-canister-min.wasm new file mode 100644 index 0000000..2de269b Binary files /dev/null and b/src/tests/ledger-canister-min.wasm differ diff --git a/src/tests/ledger_archive.did b/src/tests/ledger_archive.did new file mode 100644 index 0000000..5c49edc --- /dev/null +++ b/src/tests/ledger_archive.did @@ -0,0 +1,70 @@ +type BlockIndex = nat64; +type Memo = nat64; +type AccountIdentifier = blob; +type Tokens = record { e8s : nat64 }; +type Timestamp = record { timestamp_nanos : nat64 }; + +type Operation = variant { + Mint : record { + to : AccountIdentifier; + amount : Tokens; + }; + Burn : record { + from : AccountIdentifier; + amount : Tokens; + }; + Transfer : record { + from : AccountIdentifier; + to : AccountIdentifier; + amount : Tokens; + fee : Tokens; + }; +}; + +type Transaction = record { + memo : Memo; + // Optional to support potential future variant extensions. + operation : opt Operation; + created_at_time : Timestamp; +}; + +type Block = record { + parent_hash : opt blob; + transaction : Transaction; + timestamp : Timestamp; +}; + +type GetBlocksArgs = record { + start : BlockIndex; + length : nat64; +}; + +type BlockRange = record { + blocks : vec Block; +}; + +type GetBlocksError = variant { + + /// The [GetBlocksArgs.start] is below the first block that + /// archive node stores. + BadFirstBlockIndex : record { + requested_index : BlockIndex; + first_valid_index : BlockIndex; + }; + + /// Reserved for future use. 
+ Other : record { + error_code : nat64; + error_message : text; + }; +}; + +type GetBlocksResult = variant { + Ok : BlockRange; + Err : GetBlocksError; +}; + +service : { + get_blocks : (GetBlocksArgs) -> (GetBlocksResult) query; + remove_last_block: (nat) -> (); +} \ No newline at end of file diff --git a/src/tests/ledger_dfx_v2.did b/src/tests/ledger_dfx_v2.did new file mode 100644 index 0000000..959b503 --- /dev/null +++ b/src/tests/ledger_dfx_v2.did @@ -0,0 +1,461 @@ + + +type CanisterId = principal; + +type HeaderField = record {text; text}; +type HttpRequest = record { + url: text; + method: text; + body: vec nat8; + headers: vec HeaderField; +}; + +type HttpResponse = record { + body: vec nat8; + headers: vec HeaderField; + status_code: nat16; +}; + +//check +type TipOfChainRes = record { + certification: opt vec nat8; + tip_index: BlockHeight; +}; + +//check +type Hash = opt record { + inner: blob; +}; + +//check +type BlockArg = BlockHeight; + +//check +//maybe needs to be BlockDFX +type BlockRes = opt variant { + Ok: opt variant { + Ok: Block; + Err: CanisterId; + }; + Err: text; +}; + + +//---------------------------------------------- +//check +type Duration = record { + secs: nat64; + nanos: nat32; +}; + +//check +type ArchiveOptions = record { + trigger_threshold : nat64; + num_blocks_to_archive : nat64; + node_max_memory_size_bytes: opt nat64; + max_message_size_bytes: opt nat64; + controller_id: principal; + cycles_for_archive_creation: opt nat64; +}; + +// Height of a ledger block. +//check +type BlockHeight = nat64; + + +//check +type OperationDFX = variant { + Burn: record { + from: AccountIdentifierDFX; + amount: Tokens; + }; + Mint: record { + to: AccountIdentifierDFX; + amount: Tokens; + }; + Send: record { + from: AccountIdentifierDFX; + to: AccountIdentifierDFX; + amount: Tokens; + }; +}; + +// Arguments for the `send_dfx` call. +//check +type SendArgs = record { + memo: Memo; + amount: Tokens; + fee: Tokens; + from_subaccount: opt SubAccount; + to: AccountIdentifier; + created_at_time: opt TimeStamp; +}; + + +// Arguments for the `notify` call. +//check +type NotifyCanisterArgs = record { + // The of the block to send a notification about. + block_height: BlockHeight; + // Max fee, should be 10000 e8s. + max_fee: Tokens; + // Subaccount the payment came from. + from_subaccount: opt SubAccount; + // Canister that received the payment. + to_canister: principal; + // Subaccount that received the payment. + to_subaccount: opt SubAccount; +}; + + +//check +type LedgerCanisterInitPayload = record { + minting_account: AccountIdentifierDFX; + initial_values: vec record {AccountIdentifierDFX; Tokens}; + max_message_size_bytes: opt nat64; + transaction_window: opt Duration; + archive_options: opt ArchiveOptions; + send_whitelist: vec principal; + standard_whitelist: vec principal; + transfer_fee: opt Tokens; + token_symbol: opt text; + token_name: opt text; + admin: principal; +}; + +//------------------- +//------------------- +//check +type Tokens = record { + e8s : nat64; +}; + +// Number of nanoseconds from the UNIX epoch in UTC timezone. +//check +type TimeStamp = record { + timestamp_nanos: nat64; +}; + +// AccountIdentifier is a 32-byte array. +// The first 4 bytes is big-endian encoding of a CRC32 checksum of the last 28 bytes. +//problem = is this vec nat8 or text +type AccountIdentifierDFX = text; + +type AccountIdentifier = blob; + +// Subaccount is an arbitrary 32-byte byte array. 
+// Ledger uses subaccounts to compute the source address, which enables one +// principal to control multiple ledger accounts. +//check - problem - used to be vec nat8 +type SubAccount = blob; + +// Sequence number of a block produced by the ledger. +//check +type BlockIndex = nat64; + +// An arbitrary number associated with a transaction. +// The caller can set it in a `transfer` call as a correlation identifier. +//check +type Memo = nat64; + +// Arguments for the `transfer` call. +//check +type TransferArgs = record { + // Transaction memo. + // See comments for the `Memo` type. + memo: Memo; + // The amount that the caller wants to transfer to the destination address. + amount: Tokens; + // The amount that the caller pays for the transaction. + // Must be 10000 e8s. + fee: Tokens; + // The subaccount from which the caller wants to transfer funds. + // If null, the ledger uses the default (all zeros) subaccount to compute the source address. + // See comments for the `SubAccount` type. + from_subaccount: opt SubAccount; + // The destination account. + // If the transfer is successful, the balance of this address increases by `amount`. + to: AccountIdentifier; + // The point in time when the caller created this request. + // If null, the ledger uses current IC time as the timestamp. + created_at_time: opt TimeStamp; +}; + +// Arguments for the `transfer_standard_stdldg` call. +//check +type TransferStandardArgs = record { + // Transaction memo. + // See comments for the `Memo` type. + memo: Memo; + // The amount that the caller wants to transfer to the destination address. + amount: Tokens; + // The amount that the caller pays for the transaction. + // Must be 10000 e8s. + fee: Tokens; + // The principal from which the standard canister wants to transfer funds. + + from_principal: principal; + // The subaccount from which the caller wants to transfer funds. + // If null, the ledger uses the default (all zeros) subaccount to compute the source address. + // See comments for the `SubAccount` type. + from_subaccount: opt SubAccount; + // The destination account. + // If the transfer is successful, the balance of this address increases by `amount`. + to: AccountIdentifier; + // The point in time when the caller created this request. + // If null, the ledger uses current IC time as the timestamp. + created_at_time: opt TimeStamp; +}; + +//check +type TransferError = variant { + // The fee that the caller specified in the transfer request was not the one that ledger expects. + // The caller can change the transfer fee to the `expected_fee` and retry the request. + BadFee : record { expected_fee : Tokens; }; + // The account specified by the caller doesn't have enough funds. + InsufficientFunds : record { balance: Tokens; }; + // The request is too old. + // The ledger only accepts requests created within 24 hours window. + // This is a non-recoverable error. + TxTooOld : record { allowed_window_nanos: nat64 }; + // The caller specified `created_at_time` that is too far in future. + // The caller can retry the request later. + TxCreatedInFuture : null; + // The ledger has already executed the request. + // `duplicate_of` field is equal to the index of the block containing the original transaction. + TxDuplicate : record { duplicate_of: BlockIndex; } +}; + +//check +type TransferResult = variant { + Ok : BlockIndex; + Err : TransferError; +}; + +// Arguments for the `account_balance_dfx` call. 
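+// Unlike `account_balance`, this call takes the textual AccountIdentifierDFX
+// form rather than the 32-byte blob.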
+//check +type AccountBalanceArgsDFX = record { + account: AccountIdentifierDFX; +}; + +// Arguments for the `account_balance` call. +//check +type AccountBalanceArgs = record { + account: AccountIdentifier; +}; + +//check +type TransferFeeArg = record {}; + +//check +type TransferFee = record { + // The fee to pay to perform a transfer + transfer_fee: Tokens; +}; + +//check +type GetBlocksArgs = record { + // The index of the first block to fetch. + start : BlockIndex; + // Max number of blocks to fetch. + length : nat64; +}; + +//check +type Operation = variant { + Mint : record { + to : AccountIdentifier; + amount : Tokens; + }; + Burn : record { + from : AccountIdentifier; + amount : Tokens; + }; + Transfer : record { + from : AccountIdentifier; + to : AccountIdentifier; + amount : Tokens; + fee : Tokens; + }; +}; + +//check +type Transaction = record { + memo : Memo; + operation : opt Operation; + created_at_time : TimeStamp; +}; + +//check +type TransactionDFX = record { + memo : Memo; + operation : opt OperationDFX; + created_at_time : TimeStamp; +}; + +//check +type Block = record { + parent_hash : opt blob; + transaction : Transaction; + timestamp : TimeStamp; +}; + +type BlockDFX = record { + parent_hash : opt blob; + transaction : TransactionDFX; + timestamp : TimeStamp; +}; + +// A prefix of the block range specified in the [GetBlocksArgs] request. +//check +type BlockRange = record { + // A prefix of the requested block range. + // The index of the first block is equal to [GetBlocksArgs.from]. + // + // Note that the number of blocks might be less than the requested + // [GetBlocksArgs.len] for various reasons, for example: + // + // 1. The query might have hit the replica with an outdated state + // that doesn't have the full block range yet. + // 2. The requested range is too large to fit into a single reply. + // + // NOTE: the list of blocks can be empty if: + // 1. [GetBlocksArgs.len] was zero. + // 2. [GetBlocksArgs.from] was larger than the last block known to the canister. + blocks : vec Block; +}; + +// An error indicating that the arguments passed to [QueryArchiveFn] were invalid. +//check +type QueryArchiveError = variant { + // [GetBlocksArgs.from] argument was smaller than the first block + // served by the canister that received the request. + BadFirstBlockIndex : record { + requested_index : BlockIndex; + first_valid_index : BlockIndex; + }; + + // Reserved for future use. + Other : record { + error_code : nat64; + error_message : text; + }; +}; + +//check +type QueryArchiveResult = variant { + // Successfully fetched zero or more blocks. + Ok : BlockRange; + // The [GetBlocksArgs] request was invalid. + Err : QueryArchiveError; +}; + +// A function that is used for fetching archived ledger blocks. +//check +type QueryArchiveFn = func (GetBlocksArgs) -> (QueryArchiveResult) query; + +// The result of a "query_blocks" call. +// +// The structure of the result is somewhat complicated because the main ledger canister might +// not have all the blocks that the caller requested: One or more "archive" canisters might +// store some of the requested blocks. +// +// Note: as of Q4 2021 when this interface is authored, the IC doesn't support making nested +// query calls within a query call. +//check +type QueryBlocksResponse = record { + // The total number of blocks in the chain. + // If the chain length is positive, the index of the last block is `chain_len - 1`. + chain_length : nat64; + + // System certificate for the hash of the latest block in the chain. 
+ // Only present if `query_blocks` is called in a non-replicated query context. + certificate : opt blob; + + // List of blocks that were available in the ledger when it processed the call. + // + // The blocks form a contiguous range, with the first block having index + // [first_block_index] (see below), and the last block having index + // [first_block_index] + len(blocks) - 1. + // + // The block range can be an arbitrary sub-range of the originally requested range. + blocks : vec Block; + + // The index of the first block in "blocks". + // If the blocks vector is empty, the exact value of this field is not specified. + first_block_index : BlockIndex; + + // Encoding of instructions for fetching archived blocks whose indices fall into the + // requested range. + // + // For each entry `e` in [archived_blocks], `[e.from, e.from + len)` is a sub-range + // of the originally requested block range. + archived_blocks : vec record { + // The index of the first archived block that can be fetched using the callback. + start : BlockIndex; + + // The number of blocks that can be fetch using the callback. + length : nat64; + + // The function that should be called to fetch the archived blocks. + // The range of the blocks accessible using this function is given by [from] + // and [len] fields above. + callback : QueryArchiveFn; + }; +}; + +//check +type Archive = record { + canister_id: principal; +}; + +//check +type Archives = record { + archives: vec Archive; +}; + +service: (LedgerCanisterInitPayload) -> { + send_dfx : (SendArgs) -> (BlockHeight); + notify_dfx: (NotifyCanisterArgs) -> (); + account_balance_dfx : (AccountBalanceArgsDFX) -> (Tokens) query; + get_nodes : () -> (vec CanisterId) query; + http_request: (HttpRequest) -> (HttpResponse) query; + + get_admin: (record {}) -> (principal) query; + get_send_whitelist_dfx: (record {}) -> (vec principal) query; + get_minting_account_id_dfx: (record {}) -> (opt AccountIdentifier) query; + + set_admin: (principal) -> (); + set_send_whitelist_dfx: (vec principal) -> (); + set_standard_whitelist_dfx: (vec principal) -> (); + set_minting_account_id_dfx: (AccountIdentifier) -> (); + + + total_supply_dfx : (record {}) -> (Tokens) query; + tip_of_chain_dfx : (record {}) -> (TipOfChainRes) query; + transfer : (TransferArgs) -> (TransferResult); + transfer_standard_stdldg : (TransferStandardArgs) -> (TransferResult); + + // Returns the amount of Tokens on the specified account. + account_balance : (AccountBalanceArgs) -> (Tokens) query; + + // Returns the current transfer_fee. + transfer_fee : (TransferFeeArg) -> (TransferFee) query; + + block_dfx : (BlockArg) -> (BlockRes) query; + + // Queries blocks in the specified range. + query_blocks : (GetBlocksArgs) -> (QueryBlocksResponse) query; + + // Returns token symbol. + symbol : () -> (record { symbol: text }) query; + + // Returns token name. + name : () -> (record { name: text }) query; + + // Returns token decimals. + decimals : () -> (record { decimals: nat32 }) query; + + // Returns the existing archive canisters information. 
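+    // Archived blocks themselves are fetched through the QueryArchiveFn
+    // callbacks returned in `query_blocks` responses.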
+ archives : () -> (Archives) query; +} \ No newline at end of file diff --git a/src/tests/prod-canister-deploy.md b/src/tests/prod-canister-deploy.md new file mode 100644 index 0000000..9c8595f --- /dev/null +++ b/src/tests/prod-canister-deploy.md @@ -0,0 +1,107 @@ +dfx canister --network ic call origyn_nft_reference market_transfer_nft_origyn '( record { + token_id="ogy.nftforgood_uffc.0"; + sales_config = record { + escrow_receipt = null; + pricing = variant { + auction = record{ + reserve = opt(100000000000000:nat); + token = variant { + ic = record{ + canister = principal "ryjl3-tyaaa-aaaaa-aaaba-cai"; + standard = variant {Ledger =null}; + decimals = 8:nat; + symbol = "ICP"; + fee = 10000; + } + }; + buy_now= null; + start_price = 100_000:nat; + start_date = 0; + ending = variant{date = 1651288696000000000:int}; + min_increase = variant{amount = 100_000:nat}; + } + } + } +})' + + +dfx canister --network ic call origyn_nft_reference end_sale_nft_origyn '("ogy.nftforgood_uffc.0")' + + +dfx canister --network ic call origyn_nft_reference market_transfer_nft_origyn '( record { + token_id="ogy.nftforgood_uffc.0"; + sales_config = record { + escrow_receipt = null; + pricing = variant { + auction = record{ + reserve = null; + token = variant { + ic = record{ + canister = principal "ryjl3-tyaaa-aaaaa-aaaba-cai"; + standard = variant {Ledger =null}; + decimals = 8:nat; + symbol = "ICP"; + fee = 10000; + } + }; + buy_now= null; + start_price = 3_500_000_000:nat; + start_date = 0; + ending = variant{date = 1651906800000000000:int}; + min_increase = variant{amount = 100_000_000:nat}; + } + } + } +})' + +dfx canister --network ic call origyn_nft_reference market_transfer_nft_origyn '( record { + token_id="ogy.nftforgood_uffc.1"; + sales_config = record { + escrow_receipt = null; + pricing = variant { + auction = record{ + reserve = null; + token = variant { + ic = record{ + canister = principal "ryjl3-tyaaa-aaaaa-aaaba-cai"; + standard = variant {Ledger =null}; + decimals = 8:nat; + symbol = "ICP"; + fee = 10000; + } + }; + buy_now= null; + start_price = 3_500_000_000:nat; + start_date = 0; + ending = variant{date = 1651906800000000000:int}; + min_increase = variant{amount = 100_000_000:nat}; + } + } + } +})' + +dfx canister --network ic call origyn_nft_reference market_transfer_nft_origyn '( record { + token_id="ogy.nftforgood_uffc.2"; + sales_config = record { + escrow_receipt = null; + pricing = variant { + auction = record{ + reserve = null; + token = variant { + ic = record{ + canister = principal "ryjl3-tyaaa-aaaaa-aaaba-cai"; + standard = variant {Ledger =null}; + decimals = 8:nat; + symbol = "ICP"; + fee = 10000; + } + }; + buy_now= null; + start_price = 3_500_000_000:nat; + start_date = 0; + ending = variant{date = 1651906800000000000:int}; + min_increase = variant{amount = 100_000_000:nat}; + } + } + } +})' \ No newline at end of file diff --git a/src/tests/storage_creator.mo b/src/tests/storage_creator.mo new file mode 100644 index 0000000..5c52132 --- /dev/null +++ b/src/tests/storage_creator.mo @@ -0,0 +1,20 @@ + +import C "mo:matchers/Canister"; +import M "mo:matchers/Matchers"; +import T "mo:matchers/Testable"; +import D "mo:base/Debug"; +import Principal "mo:base/Principal"; +import Blob "mo:base/Blob"; +import DFXTypes "../origyn_nft_reference/dfxtypes"; +import AccountIdentifier "mo:principalmo/AccountIdentifier"; +import StorageCanisterDef "../origyn_nft_reference/storage_canister"; + +shared (deployer) actor class storage_creator() = this { + public shared func create(data : 
{owner : Principal; storage_space: ?Nat}) : async Principal { + D.print("in create storage"); + let a = await StorageCanisterDef.Storage_Canister({gateway_canister = data.owner; storage_space = data.storage_space; network = null}); + debug { D.print("should have it....returning" # debug_show(data)) }; + + return Principal.fromActor(a); + }; +}; \ No newline at end of file diff --git a/src/tests/test_runner.mo b/src/tests/test_runner.mo new file mode 100644 index 0000000..ef5e1be --- /dev/null +++ b/src/tests/test_runner.mo @@ -0,0 +1,292 @@ + +import C "mo:matchers/Canister"; +import M "mo:matchers/Matchers"; +import T "mo:matchers/Testable"; +import D "mo:base/Debug"; +import Principal "mo:base/Principal"; +import Blob "mo:base/Blob"; +import DFXTypes "../origyn_nft_reference/dfxtypes"; +import AccountIdentifier "mo:principalmo/AccountIdentifier"; + + + +shared (deployer) actor class test_runner(tests : { + canister_factory : Principal; + storage_factory : Principal; + dfx_ledger: ?Principal; + dfx_ledger2: ?Principal; + test_runner_nft: ?Principal; + test_runner_nft_2: ?Principal; + test_runner_instant: ?Principal; + test_runner_data :?Principal; + test_runner_utils: ?Principal; + test_runner_collection: ?Principal; + test_runner_storage: ?Principal; + test_runner_sale: ?Principal; + }) = this { + + + //D.print("tests are " # debug_show(tests)); + + type test_runner_nft_service = actor { + test: (Principal, Principal) -> async ({#success; #fail : Text}); + }; + + let it = C.Tester({ batchSize = 8 }); + + public shared func test() : async Text { + + D.print("tests are " # debug_show(tests)); + + var dfx_ledger = switch(tests.dfx_ledger){ + case(null){Principal.fromText("aaaaa-aa")}; + case(?val){val}; + }; + + var dfx_ledger2 = switch(tests.dfx_ledger2){ + case(null){Principal.fromText("aaaaa-aa")}; + case(?val){val}; + }; + + + //this is annoying, but it is gets around the "not defined bug"; + switch(tests.test_runner_sale){ + case(null){ + D.print("skipping sale tests" # debug_show(tests)); + }; + case(?test_runner_sale){ + D.print("running sale tests" # debug_show(test_runner_sale)); + let SaleTestCanister : test_runner_nft_service = actor(Principal.toText(test_runner_sale)); + + it.should("run sale tests", func () : async C.TestResult = async { + //send testrunnner some dfx tokens + D.print("int the it"); + let dfx : DFXTypes.Service = actor(Principal.toText(dfx_ledger)); + D.print("about to send to test canister" # debug_show(dfx_ledger)); + let resultdfx = await dfx.transfer({ + to = Blob.fromArray(AccountIdentifier.addHash(AccountIdentifier.fromPrincipal(Principal.fromActor(SaleTestCanister), null))); + fee = {e8s = 200_000}; + memo = 1; + from_subaccount = null; + created_at_time = null; + amount = {e8s = 200_000_000_000_000};}); + + + let dfx2 : DFXTypes.Service = actor(Principal.toText(dfx_ledger2)); + D.print("about to send to test canister" # debug_show(dfx_ledger2)); + let resultdfx2 = await dfx2.transfer({ + to = Blob.fromArray(AccountIdentifier.addHash(AccountIdentifier.fromPrincipal(Principal.fromActor(SaleTestCanister), null))); + fee = {e8s = 200_000}; + memo = 1; + from_subaccount = null; + created_at_time = null; + amount = {e8s = 200_000_000_000_000};}); + + D.print(debug_show(resultdfx)); + + let result = await SaleTestCanister.test(tests.canister_factory, tests.storage_factory); + D.print("result"); + //D.print(debug_show(result)); + //M.attempt(greeting, M.equals(T.text("Hello, Christoph!"))) + return result; + }); + }; + }; + + //this is annoying, but it is gets 
around the "not defined bug"; + switch(tests.test_runner_nft){ + case(null){}; + case(?test_runner_nft){ + D.print("running nft tests"); + let NFTTestCanister : test_runner_nft_service = actor(Principal.toText(test_runner_nft)); + + it.should("run nft tests", func () : async C.TestResult = async { + //send testrunnner some dfx tokens + let dfx : DFXTypes.Service = actor(Principal.toText(dfx_ledger)); + D.print("about to send to test canister nft" # debug_show(dfx_ledger)); + let resultdfx = await dfx.transfer({ + to = Blob.fromArray(AccountIdentifier.addHash(AccountIdentifier.fromPrincipal(Principal.fromActor(NFTTestCanister), null))); + fee = {e8s = 200_000}; + memo = 1; + from_subaccount = null; + created_at_time = null; + amount = {e8s = 200_000_000_000_000};}); + + let dfx2 : DFXTypes.Service = actor(Principal.toText(dfx_ledger2)); + D.print("about to send to test canister nft 2 " # debug_show(dfx_ledger2)); + let resultdfx2 = await dfx2.transfer({ + to = Blob.fromArray(AccountIdentifier.addHash(AccountIdentifier.fromPrincipal(Principal.fromActor(NFTTestCanister), null))); + fee = {e8s = 200_000}; + memo = 1; + from_subaccount = null; + created_at_time = null; + amount = {e8s = 200_000_000_000_000};}); + + D.print(debug_show(resultdfx)); + + D.print(debug_show(resultdfx2)); + + let result = await NFTTestCanister.test(tests.canister_factory, tests.storage_factory); + //D.print("result"); + //D.print(debug_show(result)); + //M.attempt(greeting, M.equals(T.text("Hello, Christoph!"))) + return result; + }); + }; + }; + + switch(tests.test_runner_nft_2){ + case(null){}; + case(?test_runner_nft_2){ + D.print("running nft 2 tests"); + let NFTTestCanister2 : test_runner_nft_service = actor(Principal.toText(test_runner_nft_2)); + D.print("running nft 2 tests after"); + it.should("run nft tests 2", func () : async C.TestResult = async { + //send testrunnner some dfx tokens + let dfx : DFXTypes.Service = actor(Principal.toText(dfx_ledger)); + D.print("about to send to test canister" # debug_show(dfx_ledger)); + let resultdfx = await dfx.transfer({ + to = Blob.fromArray(AccountIdentifier.addHash(AccountIdentifier.fromPrincipal(Principal.fromActor(NFTTestCanister2), null))); + fee = {e8s = 200_000}; + memo = 1; + from_subaccount = null; + created_at_time = null; + amount = {e8s = 200_000_000_000_000};}); + + let dfx2 : DFXTypes.Service = actor(Principal.toText(dfx_ledger2)); + //D.print("about to send to test canister"); + let resultdfx2 = await dfx2.transfer({ + to = Blob.fromArray(AccountIdentifier.addHash(AccountIdentifier.fromPrincipal(Principal.fromActor(NFTTestCanister2), null))); + fee = {e8s = 200_000}; + memo = 1; + from_subaccount = null; + created_at_time = null; + amount = {e8s = 200_000_000_000_000};}); + + //D.print(debug_show(resultdfx)); + + let result = await NFTTestCanister2.test(tests.canister_factory, tests.storage_factory); + //D.print("result"); + //D.print(debug_show(result)); + //M.attempt(greeting, M.equals(T.text("Hello, Christoph!"))) + return result; + }); + }; + }; + + switch(tests.test_runner_collection){ + + case(null){}; + case(?test_runner_collection){ + //D.print("running collection tests"); + + let CollectionTestCanister : test_runner_nft_service = actor(Principal.toText(test_runner_collection)); + + it.should("run collection tests", func () : async C.TestResult = async { + //send testrunnner some dfx tokens + + + let result = await CollectionTestCanister.test(tests.canister_factory, tests.storage_factory); + + return result; + }); + }; + }; + + 
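+    // Every optional runner above and below follows the same registration pattern:
+    // cast the principal to the test_runner_nft_service interface, transfer ledger
+    // tokens to it when its tests need funds, and add an it.should case that
+    // delegates to its test method. A minimal sketch of that pattern for a
+    // hypothetical extra runner (the tests record would need a matching optional
+    // field; it is not part of this suite):
+    //
+    //   switch(tests.test_runner_example){
+    //     case(null){};
+    //     case(?test_runner_example){
+    //       let ExampleCanister : test_runner_nft_service = actor(Principal.toText(test_runner_example));
+    //       it.should("run example tests", func () : async C.TestResult = async {
+    //         return await ExampleCanister.test(tests.canister_factory, tests.storage_factory);
+    //       });
+    //     };
+    //   };
+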
switch(tests.test_runner_storage){ + + case(null){}; + case(?test_runner_storage){ + //D.print("running storage tests"); + + let StorageTestCanister : test_runner_nft_service = actor(Principal.toText(test_runner_storage)); + + it.should("run storage tests", func () : async C.TestResult = async { + //send testrunnner some dfx tokens + + + let result = await StorageTestCanister.test(tests.canister_factory, tests.storage_factory); + + return result; + }); + }; + }; + + switch(tests.test_runner_instant){ + + case(null){}; + case(?test_runner_instant){ + //D.print("running instant tests"); + let InstantTestCanister : test_runner_nft_service = actor(Principal.toText(test_runner_instant)); + + it.should("run instant tests", func () : async C.TestResult = async { + //send testrunnner some dfx tokens + let dfx : DFXTypes.Service = actor(Principal.toText(dfx_ledger)); + //D.print("about to send to test canister"); + let resultdfx = await dfx.transfer({ + to = Blob.fromArray(AccountIdentifier.addHash(AccountIdentifier.fromPrincipal(Principal.fromActor(InstantTestCanister), null))); + fee = {e8s = 200_000}; + memo = 1; + from_subaccount = null; + created_at_time = null; + amount = {e8s = 200_000_000_000_000};}); + + let dfx2 : DFXTypes.Service = actor(Principal.toText(dfx_ledger2)); + //D.print("about to send to test canister"); + let resultdfx2 = await dfx2.transfer({ + to = Blob.fromArray(AccountIdentifier.addHash(AccountIdentifier.fromPrincipal(Principal.fromActor(InstantTestCanister), null))); + fee = {e8s = 200_000}; + memo = 1; + from_subaccount = null; + created_at_time = null; + amount = {e8s = 200_000_000_000_000};}); + + //D.print(debug_show(resultdfx)); + + let result = await InstantTestCanister.test(tests.canister_factory, tests.storage_factory); + //D.print("result"); + //D.print(debug_show(result)); + //M.attempt(greeting, M.equals(T.text("Hello, Christoph!"))) + return result; + }); + }; + }; + + switch(tests.test_runner_data){ + + case(null){}; + case(?test_runner_data){ + //D.print("running data tests"); + let DATATestCanister : test_runner_nft_service = actor(Principal.toText(test_runner_data)); + + it.should("run data tests", func () : async C.TestResult = async { + + let result = await DATATestCanister.test(tests.canister_factory, tests.storage_factory); + //M.attempt(greeting, M.equals(T.text("Hello, Christoph!"))) + return result; + }); + }; + }; + + switch(tests.test_runner_utils){ + + case(null){}; + case(?test_runner_utils){ + //D.print("running util tests"); + let UTILSTestCanister : test_runner_nft_service = actor(Principal.toText(test_runner_utils)); + it.should("run util tests", func () : async C.TestResult = async { + //send testrunnner some dfx tokens + + + let result = await UTILSTestCanister.test(tests.canister_factory, tests.storage_factory); + + return result; + }); + }; + }; + + //D.print("about to run"); + await it.runAll() + //await it.run() + } +} \ No newline at end of file diff --git a/src/tests/test_runner_collection.mo b/src/tests/test_runner_collection.mo new file mode 100644 index 0000000..5f0e5af --- /dev/null +++ b/src/tests/test_runner_collection.mo @@ -0,0 +1,178 @@ + +import AccountIdentifier "mo:principalmo/AccountIdentifier"; +import Array "mo:base/Array"; +import Blob "mo:base/Blob"; +import C "mo:matchers/Canister"; +import CandyTypes "mo:candy_0_1_10/types"; +import Conversion "mo:candy_0_1_10/conversion"; +import D "mo:base/Debug"; +import Error "mo:base/Error"; +import Iter "mo:base/Iter"; +import M "mo:matchers/Matchers"; +import Nat 
"mo:base/Nat"; +import Nat64 "mo:base/Nat64"; +import Option "mo:base/Option"; +import Principal "mo:base/Principal"; +import Properties "mo:candy_0_1_10/properties"; +import Result "mo:base/Result"; +import S "mo:matchers/Suite"; +import T "mo:matchers/Testable"; +import TestWalletDef "test_wallet"; +import Time "mo:base/Time"; +import Types "../origyn_nft_reference/types"; +//import Instant "test_runner_instant_transfer"; + + +shared (deployer) actor class test_runner_collection(dfx_ledger: Principal, dfx_ledger2: Principal) = this { + let it = C.Tester({ batchSize = 8 }); + + + private var DAY_LENGTH = 60 * 60 * 24 * 10 ** 9; + private var dip20_fee = 200_000; + + private func get_time() : Int{ + return Time.now(); + }; + + + private type canister_factory_actor = actor { + create : ({owner: Principal; storage_space: ?Nat}) -> async Principal; + }; + private type storage_factory_actor = actor { + create : ({owner: Principal; storage_space: ?Nat}) -> async Principal; + }; + + private var g_canister_factory : canister_factory_actor = actor(Principal.toText(Principal.fromBlob("\04"))); + private var g_storage_factory: storage_factory_actor = actor(Principal.toText(Principal.fromBlob("\04"))); + + + + + public shared func test(canister_factory : Principal, storage_factory: Principal) : async {#success; #fail : Text} { + + g_canister_factory := actor(Principal.toText(canister_factory)); + g_storage_factory := actor(Principal.toText(storage_factory)); + //let Instant_Test = await Instant.test_runner_instant_transfer(); + + let suite = S.suite("test nft", [ + S.test("testCollectionData", switch(await testCollectionData()){case(#success){true};case(_){false};}, M.equals(T.bool(true))), + //S.test("testCollectionMetadata", switch(await testCollectionMetadata()){case(#success){true};case(_){false};}, M.equals(T.bool(true))), + //S.test("testCollectionNFTList", switch(await testCollectionNFTList()){case(#success){true};case(_){false};}, M.equals(T.bool(true))), + //S.test("testCollectionOwner", switch(await testCollectionOWner()){case(#success){true};case(_){false};}, M.equals(T.bool(true))), + //S.test("testCollectionManager", switch(await testCollectionManager()){case(#success){true};case(_){false};}, M.equals(T.bool(true))), + + //S.test("testInstantTransfer", switch(await Instant_Test.testInstantTransfer()){case(#success){true};case(_){false};}, M.equals(T.bool(true))), + ]); + S.run(suite); + + return #success; + }; + + // MINT0002 + // MINT0003 + public shared func testCollectionData() : async {#success; #fail : Text} { + //D.print("running testCollectionData"); + + let owner = Principal.toText(Principal.fromActor(this)); + + let newPrincipal = await g_canister_factory.create({ + owner = Principal.fromActor(this); + storage_space = null; + }); + + let canister : Types.Service = actor(Principal.toText(newPrincipal)); + + let collection_info_original = switch(await canister.collection_nft_origyn(null)){ + case(#err(err)){ + //throw an error + //D.print(debug_show(err)); + throw(Error.reject("couldn't get canister info before set ")); + }; + case(#ok(val)){val}; + }; + + //set collection info + //D.print("set collection info"); + let collection_update_response = await canister.collection_update_batch_nft_origyn([ + 
#UpdateLogo(?"iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAABRWlDQ1BJQ0MgUHJvZmlsZQAAKJFjYGASSSwoyGFhYGDIzSspCnJ3UoiIjFJgf8bAyiDIwM3AwqCfmFxc4BgQ4ANUwgCjUcG3awyMIPqyLsismsYvc5T+MhWGp+Q1nLi29xKmehTAlZJanAyk/wBxWnJBUQkDA2MKkK1cXlIAYncA2SJFQEcB2XNA7HQIewOInQRhHwGrCQlyBrJvANkCyRmJQDMYXwDZOklI4ulIbKi9IMDj467g4RKkEO7m4ULAuaSDktSKEhDtnF9QWZSZnlGi4AgMpVQFz7xkPR0FIwMjIwYGUJhDVH++AQ5LRjEOhFiBGAODxQyg4EOEWDzQD9vlGBj4+xBiakD/CngxMBzcV5BYlAh3AOM3luI0YyMIm3s7AwPrtP//P4czMLBrMjD8vf7//+/t////XcbAwHyLgeHANwA5HmFySGEQ9QAAAFZlWElmTU0AKgAAAAgAAYdpAAQAAAABAAAAGgAAAAAAA5KGAAcAAAASAAAARKACAAQAAAABAAAAEKADAAQAAAABAAAAEAAAAABBU0NJSQAAAFNjcmVlbnNob3Q3CVDhAAAB1GlUWHRYTUw6Y29tLmFkb2JlLnhtcAAAAAAAPHg6eG1wbWV0YSB4bWxuczp4PSJhZG9iZTpuczptZXRhLyIgeDp4bXB0az0iWE1QIENvcmUgNi4wLjAiPgogICA8cmRmOlJERiB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiPgogICAgICA8cmRmOkRlc2NyaXB0aW9uIHJkZjphYm91dD0iIgogICAgICAgICAgICB4bWxuczpleGlmPSJodHRwOi8vbnMuYWRvYmUuY29tL2V4aWYvMS4wLyI+CiAgICAgICAgIDxleGlmOlBpeGVsWURpbWVuc2lvbj4xNjwvZXhpZjpQaXhlbFlEaW1lbnNpb24+CiAgICAgICAgIDxleGlmOlBpeGVsWERpbWVuc2lvbj4xNjwvZXhpZjpQaXhlbFhEaW1lbnNpb24+CiAgICAgICAgIDxleGlmOlVzZXJDb21tZW50PlNjcmVlbnNob3Q8L2V4aWY6VXNlckNvbW1lbnQ+CiAgICAgIDwvcmRmOkRlc2NyaXB0aW9uPgogICA8L3JkZjpSREY+CjwveDp4bXBtZXRhPgpzPGLtAAACxUlEQVQ4ER2Ty3LjRBSGv261JN/imHgqGUgKUkPVFLBnxQPwsLwEK/YDzAQmRUickHHGjmM7tmVduptfWags+fR/zn85baZfnsbkAJqhpzqE4fddYhpZHlnSH7r0nSGYiPuuD6OEifVc2ciThUfAFkVkt/GkISGWEfYBYyBV0eml8QIPHPQcZQg863sn8FRH72LEjrqvqaqE+bwiD5Z61kA0ZB783rOsApzkkMADkakaPDRioPdlywB/QM4hpulQbSBsBNipQQ0mRLrjlHCYcd+x3KWGqch80v9zgZ9alrUfkVjzIqFcL7HOk7fVgR7R7Jz2+K9r+cs0/BsDdz6wUGmlRmtJdJGviWFBYnJiLQlPa6HE90z4zFAe5XxMPO8Enkj7XKaWGljIp13LwNlzKp/j6OBkZFNaovwwMjMbpdzI9b99zYXADwJtnUUikUIqURSDY1LrMGpgoqen4b54JK5XNInhw3TDhy5cvXI8C9ToJPJAPiteMUj8mQw+lmPnlP4tlatFcctv1R+8s5+48Asuq0LxSV7tMZJnhYxeaGNbBt+qr/j5jI1+NkqxdfeX4kd+bf6kf3JFHd5jEs22hSryxqXaFUVbZa3snJVoPQp8KfBMW/fPtuD3h5zq+me+eDMnJGc02wlJv02/bV9qL+REd4f7GOqXrfpsUi5tzSTItOIzM8aUN132N6f03v7E3h1T7y9IOoOXJpG9xha496FhIpfv5eutFuU6FswOBTzo4deR8n5HOnakZ6f4sBW4p+dAYLGIe9y9tCyyDnfNhtuyUlQl6clXMkh1nfEzzbm15IMhyeicutEGJUPJ2OvADjtPlevIsDSOWbFj5Ruy9nbqLnTkSTk3mN2Yai5G2yNs8o3yey3wsRbwFe62guubBVW/T+j0Sbs56y1kur2ZGjyvdKWXijBN6QzGpMOhJBiszRSe5X+yDmKkbL2XjAAAAABJRU5ErkJggg=="), + #UpdateName(?"Test Token"), + #UpdateSymbol(?"TST"), + #UpdateMetadata("collection_id", ?#Text("collection_id"), true) + ]); + + //D.print(debug_show(collection_update_response)); + + + let collection_info_after_set = switch(await canister.collection_nft_origyn(null)){ + case(#err(err)){ + //throw an error + //D.print(debug_show(err)); + throw(Error.reject("couldn't get canister info after set ")); + }; + case(#ok(val)){val}; + }; + + let suite = S.suite("test owner and manager", [ + + S.test("owner is set on default", + switch(collection_info_original.owner){case(?val){Principal.toText(val);};case(null){"ownerfield null"}}, M.equals(T.text(owner))), + S.test("manager is null", + if((switch(collection_info_original.managers){case(null){0};case(?val){val.size()}}) == 0){ + "properly empty" + } else {"improprly found " # debug_show(collection_info_original.managers)} + , M.equals(T.text("properly empty"))), + S.test("logo is null", switch(collection_info_original.logo){ + case(null){"properly null"}; + case(?val){"improprly found " # val}; + }, M.equals(T.text("properly null"))), + S.test("name is null", switch(collection_info_original.name){ + case(null){"properly null"}; + case(?val){"improprly found " # val}; + }, M.equals(T.text("properly null"))), + S.test("symbol is 
null", switch(collection_info_original.symbol){ + case(null){"properly null"}; + case(?val){"improprly found " # val}; + }, M.equals(T.text("properly null"))), + S.test("totalSupply is 0", + if((switch(collection_info_original.total_supply){case(null){0};case(?val){val;}}) == 0){ + "properly empty" + } else {"improprly found " # debug_show(collection_info_original.total_supply)} + , M.equals(T.text("properly empty"))), + S.test("token_ids is null", if((switch(collection_info_original.token_ids){case(null){0};case(?val){val.size()}}) == 0){ + "properly empty"} else {"improprly found " # debug_show(collection_info_original.token_ids)} + , M.equals(T.text("properly empty"))), + S.test("multi_canister is null", if((switch(collection_info_original.multi_canister){case(null){0};case(?val){val.size()}}) == 0){ + "properly empty"} else {"improprly found " # debug_show(collection_info_original.multi_canister)} + , M.equals(T.text("properly empty"))), + S.test("metadata is null", switch(collection_info_original.metadata){ + case(null){"properly null"}; + case(?val){"improprly found " # debug_show(val)}; + }, M.equals(T.text("properly null"))), + + + S.test("logo is not null after update", switch(collection_info_after_set.logo){ + case(null){"didn't find data"}; + case(?val){"found data"}; + }, M.equals(T.text("found data"))), + S.test("name is not nullafter update", switch(collection_info_after_set.name){ + case(null){"didn't find data"}; + case(?val){val}; + }, M.equals(T.text("Test Token"))), + S.test("symbol is not null after update", switch(collection_info_after_set.symbol){ + case(null){"didn't find data"}; + case(?val){val}; + }, M.equals(T.text("TST"))), + S.test("metadata is not null after update", switch(collection_info_after_set.metadata){ + case(null){"didn't find data"}; + case(?val){"found data"}; + }, M.equals(T.text("found data"))), + ]); + + S.run(suite); + + return #success; + }; + + + +} \ No newline at end of file diff --git a/src/tests/test_runner_data.mo b/src/tests/test_runner_data.mo new file mode 100644 index 0000000..671fd3c --- /dev/null +++ b/src/tests/test_runner_data.mo @@ -0,0 +1,360 @@ + +import AccountIdentifier "mo:principalmo/AccountIdentifier"; +import Array "mo:base/Array"; +import C "mo:matchers/Canister"; +//import CandyType "mo:candy_0_1_10/types"; +import CandyTypes "mo:candy_0_1_10/types"; +import Conversion "mo:candy_0_1_10/conversion"; +import DFXTypes "../origyn_nft_reference/dfxtypes"; +import D "mo:base/Debug"; +import Iter "mo:base/Iter"; +import M "mo:matchers/Matchers"; +import NFTUtils "../origyn_nft_reference/utils"; +import Nat64 "mo:base/Nat64"; +import Option "mo:base/Option"; +import Principal "mo:base/Principal"; +import Properties "mo:candy_0_1_10/properties"; +import Result "mo:base/Result"; +import S "mo:matchers/Suite"; +import T "mo:matchers/Testable"; +import TestWalletDef "test_wallet"; +import Time "mo:base/Time"; +import Types "../origyn_nft_reference/types"; +import utils "test_utils"; + + + +shared (deployer) actor class test_runner(dfx_ledger: Principal, dfx_ledger2: Principal) = this { + let it = C.Tester({ batchSize = 8 }); + + + private var DAY_LENGTH = 60 * 60 * 24 * 10 ** 9; + private var dip20_fee = 200_000; + + private func get_time() : Int{ + return Time.now(); + }; + + private type canister_factory_actor = actor { + create : ({owner: Principal; storage_space: ?Nat}) -> async Principal; + }; + private type storage_factory_actor = actor { + create : ({owner: Principal; storage_space: ?Nat}) -> async Principal; + }; + + 
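+    // The factory references below start out pointing at a placeholder principal;
+    // test() overwrites them with the real canister_factory and storage_factory
+    // principals before any test uses them.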
private var g_canister_factory : canister_factory_actor = actor(Principal.toText(Principal.fromBlob("\04"))); + private var g_storage_factory: storage_factory_actor = actor(Principal.toText(Principal.fromBlob("\04"))); + + + + + public shared func test(canister_factory : Principal, storage_factory: Principal) : async {#success; #fail : Text} { + g_canister_factory := actor(Principal.toText(canister_factory)); + g_storage_factory := actor(Principal.toText(storage_factory)); + + let suite = S.suite("test nft", [ + S.test("testDataInterface", switch(await testDataInterface()){case(#success){true};case(_){false};}, M.equals(T.bool(true))), + ]); + S.run(suite); + + return #success; + }; + + public shared func testDataInterface() : async {#success; #fail : Text} { + //D.print("running testDataInterface"); + + let a_wallet = await TestWalletDef.test_wallet(); + let b_wallet = await TestWalletDef.test_wallet(); + + + let newPrincipal = await g_canister_factory.create({ + owner = Principal.fromActor(this); + storage_space = null; + }); + + let canister : Types.Service = actor(Principal.toText(newPrincipal)); + + let standardStage = await utils.buildStandardNFT("1", canister, Principal.fromActor(this), 1024, false); + + //D.print("Minting"); + let mint_attempt = await canister.mint_nft_origyn("1", #principal(Principal.fromActor(a_wallet))); + + //try to get public data DATA0001 + //try to get private data DATA0002 + let getNFTAttempt = await b_wallet.try_get_nft(Principal.fromActor(canister),"1"); + let new_data = #Class([ + {name = Types.metadata.__apps_app_id; value=#Text("com.test.__public"); immutable= true}, + {name = "read"; value=#Text("public"); + immutable=false;}, + {name = "write"; value=#Class([ + {name = "type"; value=#Text("allow"); immutable= false}, + {name = "list"; value=#Array(#thawed([#Principal(Principal.fromActor(this))])); + immutable=false;}]); + immutable=false;}, + {name = "permissions"; value=#Class([ + {name = "type"; value=#Text("allow"); immutable= false}, + {name = "list"; value=#Array(#thawed([#Principal(Principal.fromActor(this))])); + immutable=false;}]); + immutable=false;}, + {name = "data"; value=#Class([ + {name = "val1"; value=#Text("val1-modified"); immutable= false}, + {name = "val2"; value=#Text("val2-modified"); immutable= false}, + {name = "val3"; value=#Class([ + {name = "data"; value=#Text("val3-modified"); immutable= false}, + {name = "read"; value=#Text("public"); + immutable=false;}, + {name = "write"; value=#Class([ + {name = "type"; value=#Text("allow"); immutable= false}, + {name = "list"; value=#Array(#thawed([#Principal(Principal.fromActor(this))])); + immutable=false;}]); + immutable=false;}]); + immutable=false;}, + {name = "val4"; value=#Class([ + {name = "data"; value=#Text("val4-modified"); immutable= false}, + {name = "read"; value=#Class([ + {name = "type"; value=#Text("allow"); immutable= false}, + {name = "list"; value=#Array(#thawed([#Principal(Principal.fromActor(this))])); + immutable=false;}]); + immutable=false;}, + {name = "write"; value=#Class([ + {name = "type"; value=#Text("allow"); immutable= false}, + {name = "list"; value=#Array(#thawed([#Principal(Principal.fromActor(this))])); + immutable=false;}]); + immutable=false;}]); + immutable=false;}]); + immutable=false;} + ]); + //DATA0010 + let setNFTAttemp_fail = await b_wallet.try_set_nft(Principal.fromActor(canister),"1", new_data); + + //DATA0012 + //D.print("should be sucessful"); + let setNFTAttemp = await canister.update_app_nft_origyn(#replace{token_id= "1"; data = 
new_data}); + //D.print(debug_show(setNFTAttemp)); + + + + + let getNFTAttempt2 = await b_wallet.try_get_nft(Principal.fromActor(canister),"1"); + //D.print(debug_show(getNFTAttempt2)); + + //D.print("have meta"); + let suite = S.suite("test staged Nft", [ + + S.test("test getNFT Attempt", switch(getNFTAttempt){case(#ok(res)){ + + switch(Properties.getClassProperty(res.metadata, Types.metadata.__apps)){ + case(?app){ + //D.print("have app"); + switch(app.value){ + case(#Array(val)){ + //D.print("have val"); + switch(val){ + case(#thawed(classes)){ + var b_foundPublic = false; + var b_foundPrivate = false; + var b_foundVal3 = false; + var b_foundVal4 = false; + //D.print("have classes"); + for(this_item in Iter.fromArray(classes)){ + //D.print("checking"); + //D.print(debug_show(classes)); + let a_app : CandyTypes.Property = Option.get(Properties.getClassProperty(this_item,Types.metadata.__apps_app_id), {immutable = false; name="app"; value =#Text("")}); + //D.print("have a_app"); + //D.print(debug_show(a_app)); + //DATA0001 + if(Conversion.valueToText(a_app.value) == "com.test.__public"){ + b_foundPublic := true; + //try to find val3 which should be hidden + //D.print("looking for val3"); + let a_data : CandyTypes.Property = Option.get(Properties.getClassProperty(this_item,"data"), {immutable = false; name="data"; value =#Text("")}); + //D.print("have a data"); + //D.print(debug_show(a_data)); + let a_val : CandyTypes.Property = Option.get(Properties.getClassProperty(a_data.value,"val3"), {immutable = false; name="data"; value =#Text("")}); + let a_val2 : CandyTypes.Property = Option.get(Properties.getClassProperty(a_data.value,"val4"), {immutable = false; name="data"; value =#Text("")}); + //D.print("have a val"); + switch(a_val.value){ + case(#Class(valInfo)){ + let a_data_data : CandyTypes.Property = Option.get(Properties.getClassProperty(a_val.value,"data"), {immutable = false; name="data"; value =#Text("")}); + //D.print("have a data data"); + + if(Conversion.valueToText(a_data_data.value) == "val3"){ + //D.print("found it"); + b_foundVal3 := true; + } else { + //D.print("didn't find it"); + } + }; + case(_){ + + }; + }; + switch(a_val2.value){ + case(#Class(valInfo)){ + let a_data_data : CandyTypes.Property = Option.get(Properties.getClassProperty(a_val2.value,"data"), {immutable = false; name="data"; value =#Text("")}); + //D.print("have a data data"); + + if(Conversion.valueToText(a_data_data.value) == "val4"){ + //D.print("found it"); + b_foundVal3 := true; + } else { + //D.print("didn't find it"); + } + }; + case(_){ + + }; + }; + }; + //DATA0002 + if(Conversion.valueToText(a_app.value) == "com.test.__private"){ + b_foundPrivate := true; + } + }; + + + switch(b_foundPublic, b_foundPrivate, b_foundVal3, b_foundVal4){ + case(true, false, true, false){ + "correct response"; + }; + case(_,_,_,_){ + "something missing or something extra"; + }; + }; + + }; + case(_){ + "wrong type of arrray"; + }; + }; + }; + case(_){ + "not an array"; + }; + + }; + }; + case(null){ + "can't find app"; + }; + }; + };case(#err(err)){"unexpected error: " # err.flag_point};}, M.equals(T.text("correct response"))), //DATA0001, DATA0002 + S.test("fail if non allowed calls write", switch(setNFTAttemp_fail){case(#ok(res)){"unexpected success"};case(#err(err)){ + if(err.number == 2000){ //unauthorized + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), //DATA0010 + S.test("allowed user can write", switch(getNFTAttempt2){case(#ok(res)){ + + 
switch(Properties.getClassProperty(res.metadata, Types.metadata.__apps)){ + case(?app){ + //D.print("have app"); + switch(app.value){ + case(#Array(val)){ + //D.print("have val"); + switch(val){ + case(#thawed(classes)){ + var b_foundPublic = false; + var b_foundPrivate = false; + var b_foundVal3 = false; + var b_foundVal4 = false; + //D.print("have classes"); + for(this_item in Iter.fromArray(classes)){ + //D.print("checking"); + //D.print(debug_show(classes)); + let a_app : CandyTypes.Property = Option.get(Properties.getClassProperty(this_item, Types.metadata.__apps_app_id), {immutable = false; name="app"; value =#Text("")}); + //D.print("have a_app"); + //D.print(debug_show(a_app)); + //DATA0001 + if(Conversion.valueToText(a_app.value) == "com.test.__public"){ + b_foundPublic := true; + //try to find val3 which should be hidden + //D.print("looking for val3"); + let a_data : CandyTypes.Property = Option.get(Properties.getClassProperty(this_item,"data"), {immutable = false; name="data"; value =#Text("")}); + //D.print("have a data"); + //D.print(debug_show(a_data)); + let a_val : CandyTypes.Property = Option.get(Properties.getClassProperty(a_data.value,"val3"), {immutable = false; name="data"; value =#Text("")}); + let a_val2 : CandyTypes.Property = Option.get(Properties.getClassProperty(a_data.value,"val4"), {immutable = false; name="data"; value =#Text("")}); + //D.print("have a val"); + switch(a_val.value){ + case(#Class(valInfo)){ + let a_data_data : CandyTypes.Property = Option.get(Properties.getClassProperty(a_val.value,"data"), {immutable = false; name="data"; value =#Text("")}); + //D.print("have a data data"); + + if(Conversion.valueToText(a_data_data.value) == "val3-modified"){ + //D.print("found it"); + b_foundVal3 := true; + } else { + //D.print("didn't find it"); + } + }; + case(_){ + + }; + }; + switch(a_val2.value){ + case(#Class(valInfo)){ + let a_data_data : CandyTypes.Property = Option.get(Properties.getClassProperty(a_val2.value,"data"), {immutable = false; name="data"; value =#Text("")}); + //D.print("have a data data"); + + if(Conversion.valueToText(a_data_data.value) == "val4-modified"){ + //D.print("found it"); + b_foundVal3 := true; + } else { + //D.print("didn't find it"); + } + }; + case(_){ + + }; + }; + }; + //DATA0002 + if(Conversion.valueToText(a_app.value) == "com.test.__private"){ + b_foundPrivate := true; + } + }; + + + switch(b_foundPublic, b_foundPrivate, b_foundVal3, b_foundVal4){ + case(true, false, true, false){ + "correct response"; + }; + case(_,_,_,_){ + "something missing or something extra"; + }; + }; + + }; + case(_){ + "wrong type of arrray"; + }; + }; + }; + case(_){ + "not an array"; + }; + + }; + }; + case(null){ + "can't find app"; + }; + }; + };case(#err(err)){"unexpected error: " # err.flag_point};}, M.equals(T.text("correct response"))), //DATA0012 + + + ]); + + S.run(suite); + + return #success; + + + + }; + + + +} \ No newline at end of file diff --git a/src/tests/test_runner_instant_transfer.mo b/src/tests/test_runner_instant_transfer.mo new file mode 100644 index 0000000..fdadd09 --- /dev/null +++ b/src/tests/test_runner_instant_transfer.mo @@ -0,0 +1,768 @@ +import S "mo:matchers/Suite"; +import T "mo:matchers/Testable"; +import M "mo:matchers/Matchers"; + +import Conversion "mo:candy_0_1_10/conversion"; +import CandyTypes "mo:candy_0_1_10/types"; + +import Principal "mo:base/Principal"; +import Time "mo:base/Time"; +import Result "mo:base/Result"; +import Blob "mo:base/Blob"; +import D "mo:base/Debug"; +import DFXTypes 
"../origyn_nft_reference/dfxtypes"; +import AccountIdentifier "mo:principalmo/AccountIdentifier"; +import TestWalletDef "test_wallet"; +import Types "../origyn_nft_reference/types"; +import Metadata "../origyn_nft_reference/metadata"; +import utils "test_utils"; + +shared (deployer) actor class test_runner_instant_transfer(dfx_ledger: Principal, dfx_ledger2: Principal) = this { + + private func get_time() : Int{ + return Time.now(); + }; + + + private type canister_factory_actor = actor { + create : ({owner: Principal; storage_space: ?Nat}) -> async Principal; + }; + private type storage_factory_actor = actor { + create : ({owner: Principal; storage_space: ?Nat}) -> async Principal; + }; + + private var g_canister_factory : canister_factory_actor = actor(Principal.toText(Principal.fromBlob("\04"))); + private var g_storage_factory: storage_factory_actor = actor(Principal.toText(Principal.fromBlob("\04"))); + + + + public shared func test(canister_factory : Principal, storage_factory: Principal) : async {#success; #fail : Text} { + + //let Instant_Test = await Instant.test_runner_instant_transfer(); + + g_canister_factory := actor(Principal.toText(canister_factory)); + g_storage_factory := actor(Principal.toText(storage_factory)); + + let suite = S.suite("test nft", [ + S.test("testInstantTransfer", switch(await testInstantTransfer()){case(#success){true};case(_){false};}, M.equals(T.bool(true))), + S.test("testSoulbound", switch(await testSoulbound()){case(#success){true};case(_){false};}, M.equals(T.bool(true))), + + ]); + S.run(suite); + + return #success; + }; + + public shared func testInstantTransfer() : async {#success; #fail : Text} { + + let this_principal = Principal.fromActor(this); + + let dfx : DFXTypes.Service = actor(Principal.toText(dfx_ledger)); + let ledger_principal = dfx_ledger; + + + //create and fund wallet + let a_wallet = await TestWalletDef.test_wallet(); let a_principal = Principal.fromActor(a_wallet); + let fund_a_wallet = await dfx.transfer({ + to = Blob.fromArray(AccountIdentifier.addHash(AccountIdentifier.fromPrincipal(Principal.fromActor(a_wallet), null))); + fee = {e8s = 200_000}; + memo = 1; + from_subaccount = null; + created_at_time = null; + amount = {e8s = 100 * 10 ** 8};}); + D.print("funding result end"); + D.print(debug_show(fund_a_wallet)); + + //create canister + let newPrincipal = await g_canister_factory.create({ + owner = Principal.fromActor(this); + storage_space = null; + }); + + let canister : Types.Service = actor(Principal.toText(newPrincipal)); + let canister_principal = Principal.fromActor(canister); + + //stage unminted and minted NFTs + let stage_minted_nft = await utils.buildStandardNFT("first", canister, this_principal, 1024, false); //this sets the owner of the nft to the canister change later on mint + let stage_unminted_nft = await utils.buildStandardNFT("second", canister, this_principal, 1024, false); //this sets the owner of th nft to the canister + + //mint first staged NFT + let mint_nft = await canister.mint_nft_origyn("first", #principal(this_principal));//changing owner of first to this + + + D.print("sending funds"); + //create an escrow by sending tokens to the ledger + let send_tokens_to_canister = await a_wallet.send_ledger_payment(ledger_principal, (1 * 10 ** 8) + 200000, canister_principal); + + D.print("funds sent " # debug_show(send_tokens_to_canister)); + //retreive block information + let block = switch(send_tokens_to_canister){ + case(#ok(ablock)){ + ablock; + }; + case(#err(other)){ + D.print("ledger didnt 
work"); + return #fail("ledger didnt work"); + }; + }; + + //reset time to time now + let set_time_mode = await canister.__set_time_mode(#test); + + let set_time = await canister.__advance_time(get_time()); + + D.print("time set"); + + //Attempt to start the auction for minted NFT + let start_auction_minted = await canister.market_transfer_nft_origyn({token_id = "first"; + sales_config = { + escrow_receipt = null; + broker_id = null; + pricing = #auction{ + reserve = ?(1 * 10 ** 8); + token = #ic({ + canister = ledger_principal; + standard = #Ledger; + decimals = 8; + symbol = "LDG"; + fee = 200000; + }); + buy_now = ?(500 * 10 ** 8); + start_price = (1 * 10 ** 8); + start_date = 0; + ending = #date(get_time() + 518400000000000); + min_increase = #amount(10*10**8); + allow_list = null; + }; }; } ); + //Get the sales id for the minted NFT + let sales_id_minted = switch(start_auction_minted){ + case(#ok(val)){ + switch(val.txn_type){ + case(#sale_opened(sale_data)){ + sale_data.sale_id; + }; + case(_){ + //D.print("Didn't find expected sale_opened"); + return #fail("Didn't find expected sale_opened"); + } + }; + }; + case(#err(item)){ + //D.print("error with auction start"); + return #fail("error with auction start"); + }; + }; + + //Attempt to start the auction for the unminted NFT + let start_auction_unminted = await canister.market_transfer_nft_origyn({token_id = "second"; + sales_config = { + escrow_receipt = null; + broker_id = null; + pricing = #auction{ + reserve = null; + token = #ic({ + canister = ledger_principal; + standard = #Ledger; + decimals = 8; + symbol = "LDG"; + fee = 200000; + }); + buy_now = ?(500 * 10 ** 8); + start_price = (1 * 10 ** 8); + start_date = 0; + ending = #date(get_time() + 518400000000000); + min_increase = #amount(10*10**8); + allow_list = null; + }; }; } ); + //Get the sales id for the unminted NFT //shouldnt get a sale id + /* let sales_id_unminted = switch(start_auction_unminted){ + case(#ok(val)){ + switch(val.txn_type){ + case(#sale_opened(sale_data)){ + sale_data.sale_id; + }; + case(_){ + //D.print("Didn't find expected sale_opened"); + return #fail("Didn't find expected sale_opened"); + } + }; + }; + case(#err(item)){ + //D.print("error with auction start"); + return #fail("error with auction start"); + }; + }; */ + + D.print("all started"); + + //Sending a valid escrow for minted item + let escrow_minted = await a_wallet.try_escrow_specific_staged(this_principal, canister_principal, ledger_principal, null, 1 * 10 ** 8, "first", ?sales_id_minted, null, null); + + //send general escrow for unminted nft with the same block + //should fail because deposit is burned + let escrow_unminted_same_block = await a_wallet.try_escrow_general_staged(canister_principal, canister_principal, ledger_principal, null, 1 * 10 ** 8, null, null); + + //create another escrow + let send_tokens_to_canister_again = await a_wallet.send_ledger_payment(ledger_principal, (1 * 10 ** 8) + 200000 + 1, canister_principal); + + //get block information + // ¿Can you use the same block twice? 
- seems not + let block2 = switch(send_tokens_to_canister_again){ + case(#ok(ablock)){ + ablock; + }; + case(#err(other)){ + D.print("ledger didnt work"); + return #fail("ledger didnt work"); + }; + }; + + //Should fail - Can't escrow_specific for unminted item + //let escrow_specific_unminted = await a_wallet.try_escrow_specific_staged(this_principal, canister_principal, ledger_principal, block, 1 * 10 ** 8, "second", ?sales_id_unminted, null); + + //send general escrow for unminted nft with new block + let escrow_new_block = await a_wallet.try_escrow_general_staged(canister_principal, canister_principal, ledger_principal, null, 1 * 10 ** 8, null, null); + + //make sure an owner can't instant transfer with my escrow when I'm intending to bid on an auction + D.print("Trying to transfer while auction open"); + let instant_transfer_no_bid_minted = await canister.market_transfer_nft_origyn({ + token_id = "first"; + sales_config = + { + escrow_receipt = ?{ + seller = #principal(this_principal); + buyer = #principal(a_principal); + token_id = "first"; + token = #ic({ + canister = ledger_principal; + standard = #Ledger; + decimals = 8; + symbol = "LDG"; + fee = 200000; + }); + amount = 100_000_000; + }; + pricing = #instant; + broker_id = null; + }; + }); + D.print(debug_show(instant_transfer_no_bid_minted)); + //note: you can't bid on an unminted NFT yet, but this should still fail as #existing_sale_found because auction has been started + let instant_transfer_no_bid_unminted = await canister.market_transfer_nft_origyn({ + token_id = "second"; + sales_config = + { + escrow_receipt = ?{ + seller = #principal(canister_principal); //canister still owns second + buyer = #principal(a_principal); + token_id = "second"; + token = #ic({ + canister = ledger_principal; + standard = #Ledger; + decimals = 8; + symbol = "LDG"; + fee = 200000; + }); + amount = 100_000_000; + }; + broker_id = null; + pricing = #instant; + }; + }); + + //Placing valid bid + let valid_bid_minted = await a_wallet.try_bid(canister_principal, this_principal, ledger_principal, 1*10**8, "first", sales_id_minted, null); + + //Placing bid for unminted item - should fail as of now + //D.print("doing valid bid unminted"); + //let valid_bid_unminted = await a_wallet.try_bid(canister_principal, canister_principal, ledger_principal, 1*10**8, "second", sales_id_unminted); + + //D.print(debug_show(valid_bid_unminted)); + //can't do an owner transfer during an auction + let transfer_while_auction_minted = await canister.market_transfer_nft_origyn({ + token_id = "first"; + sales_config = + { + escrow_receipt = ?{ + seller = #principal(this_principal); + buyer = #principal(a_principal); + token_id = "first"; + token = #ic({ + canister = ledger_principal; + standard = #Ledger; + decimals = 8; + symbol = "OGY"; + fee = 200000; + }); + amount = 100_000_000; + }; + pricing = #instant; + broker_id = null; + }; + }); + let transfer_while_auction_unminted = await canister.market_transfer_nft_origyn({ + token_id = "second"; + sales_config = + { + escrow_receipt = ?{ + seller = #principal(this_principal); + buyer = #principal(a_principal); + token_id = "second"; + token = #ic({ + canister = ledger_principal; + standard = #Ledger; + decimals = 8; + symbol = "LDG"; + fee = 200000; + }); + amount = 100_000_000; + }; + pricing = #instant; + broker_id = null; + }; + }); + + let set_time2 = await canister.__advance_time(get_time()+ 518400000000000 + 518400000000000); + + + + //end auctions - should transfer minted NFT to a wallet + let end_auction_minted = 
await canister.sale_nft_origyn(#end_sale("first")); + //let end_auction_unminted = await canister.end_sale_nft_origyn("second"); + + //try transferring wrong nft with escrow on deposit (make sure user doesn't get something they don't want) + let transfer_wrong_nft = await canister.market_transfer_nft_origyn({ + token_id = "first"; + sales_config = + { + escrow_receipt = ?{ + seller = #principal(this_principal); + buyer = #principal(a_principal); + token_id = "second"; + token = #ic({ + canister = ledger_principal; + standard = #Ledger; + decimals = 8; + symbol = "LDG"; + fee = 200000; + }); + amount = 100_000_000; + }; + pricing = #instant; + broker_id = null; + }; + }); + + //Attempt to transfer unminted NFT after auction + let unminted_instant_transfer = await canister.market_transfer_nft_origyn({ + token_id = "second"; + sales_config = + { + escrow_receipt = ?{ + seller = #principal(canister_principal); + buyer = #principal(a_principal); + token_id = ""; + token = #ic({ + canister = ledger_principal; + standard = #Ledger; + decimals = 8; + symbol = "LDG"; + fee = 200000; + }); + amount = 100_000_000; + }; + pricing = #instant; + broker_id = null; + }; + }); + + //get nft metadata info to check owner for tests + let first_nft_metadata = await a_wallet.try_get_nft(canister_principal, "first"); + let second_nft_metadata = await a_wallet.try_get_nft(canister_principal, "second"); + + /* //Helpful Debug Output + //D.print( "\n" # + "fund_a_wallet: " # debug_show(fund_a_wallet) # "\n\n" # + "stage_minted_nft: " # debug_show(stage_minted_nft) # "\n\n" # + "stage_unminted_nft: " # debug_show(stage_unminted_nft) # "\n\n" # + "mint_nft: " # debug_show(mint_nft) # "\n\n" # + "send_tokens_to_canister: " # debug_show(send_tokens_to_canister) # "\n\n" # + "block: " # debug_show(block) # "\n\n" # + "set_time: " # debug_show(set_time) # "\n\n" # + "start_auction_minted: " # debug_show(start_auction_minted) # "\n\n" # + "sales_id_minted: " # debug_show(sales_id_minted) # "\n\n" # + "start_auction_unminted: " # debug_show(start_auction_unminted) # "\n\n" # + "sales_id_unminted: " # debug_show(sales_id_unminted) # "\n\n" # + "escrow_minted: " # debug_show(escrow_minted) # "\n\n" # + "escrow_unminted_same_block: " # debug_show(escrow_unminted_same_block) # "\n\n" # + "send_tokens_to_canister_again: " # debug_show(send_tokens_to_canister_again) # "\n\n" # + "block2: " # debug_show(block2) # "\n\n" # + "escrow_specific_unminted: " # debug_show(escrow_specific_unminted) # "\n\n" # + "escrow_new_block: " # debug_show(escrow_new_block) # "\n\n" # + "instant_transfer_no_bid_minted: " # debug_show(instant_transfer_no_bid_minted) # "\n\n" # + "instant_transfer_no_bid_unminted: " # debug_show(instant_transfer_no_bid_unminted) # "\n\n" # + "valid_bid_minted: " # debug_show(valid_bid_minted) # "\n\n" # + "valid_bid_unminted: " # debug_show(valid_bid_unminted) # "\n\n" # + "transfer_while_auction_minted: " # debug_show(transfer_while_auction_minted) # "\n\n" # + "transfer_while_auction_unminted: " # debug_show(transfer_while_auction_unminted) # "\n\n" # + "end_auction_minted: " # debug_show(end_auction_minted) # "\n\n" # + "end_auction_unminted: " # debug_show(end_auction_unminted) # "\n\n" # + "transfer_wrong_nft: " # debug_show(transfer_wrong_nft) # "\n\n" # + "unminted_instant_transfer: " # debug_show(unminted_instant_transfer) # "\n\n" + ); */ + + let suite = S.suite("test NFT instant transfer", [ + S.test("NFTs staged succesfully", + switch(stage_minted_nft, stage_unminted_nft) { + case((#ok("first"), #ok(val), 
#ok(val2), #ok(val3)), + (#ok("second"), #ok(val4), #ok(val5), #ok(val6))) { "staging succesful" }; + case(_,_) { "wrong action --\nfirst: " # debug_show(stage_minted_nft) # "\nsecond: " # debug_show(stage_unminted_nft) };}, + M.equals(T.text("staging succesful"))), + S.test("NFT minted succesfully", + switch(mint_nft) { + case(#ok("first")) { "mint succesful" }; + case(_) { "wrong action: " # debug_show(mint_nft) };}, + M.equals(T.text("mint succesful"))), + S.test("Tokens sent to canister", + switch(send_tokens_to_canister) { + case(#ok(_)) { "transfer succesful" }; + case(_) { "wrong action: " # debug_show(send_tokens_to_canister) };}, + M.equals(T.text("transfer succesful"))), + S.test("Minted auction started correctly", + switch(start_auction_minted) { + case(#ok(_)) { "auction start succesful" }; + case(_) { "wrong action: " # debug_show(start_auction_minted) };}, + M.equals(T.text("auction start succesful"))), + S.test("Unminted auction started correctly", + switch(start_auction_unminted) { + case(#err(err)) { + if (err.error == #nyi) { "correct error" } + else { "wrong error: " # debug_show(err.error)}; }; + case(_) { "wrong action: " # debug_show(start_auction_unminted) };}, + M.equals(T.text("correct error"))), + S.test("Escrow created succesfully (minted NFT)", + switch(escrow_minted) { + case(#ok(info)) { + if (info.balance == 100_000_000 and info.receipt.buyer == #principal(a_principal) and info.receipt.seller == #principal(this_principal)) { "correct escrow data" } + else { "wrong escrow data: " # debug_show(escrow_minted) }; }; + case(_) { "escrow should have passed: " # debug_show(escrow_minted) };}, + M.equals(T.text("correct escrow data"))), + S.test("Escrow with burned deposit (same block twice)", + switch(escrow_unminted_same_block) { + case(#err(err)) { + if (err.number == 3003) { "correct error" } + else { "wrong error: " # debug_show(err)}; }; + case(_) { "escrow should not have passed: " # debug_show(escrow_unminted_same_block) };}, + M.equals(T.text("correct error"))), + S.test("Tokens sent to canister again", + switch(send_tokens_to_canister_again) { + case(#ok(_)) { "transfer succesful" }; + case(_) { "wrong action: " # debug_show(send_tokens_to_canister_again) };}, + M.equals(T.text("transfer succesful"))), + /* S.test("Escrow_specific for an unminted NFT", + switch(escrow_specific_unminted) { + case(#err(err)) { + if (err.error == #token_not_found) { "correct error" } + else { "wrong error: " # debug_show(err.error)}; }; + case(_) { "escrow should not have passed: " # debug_show(escrow_specific_unminted) };}, + M.equals(T.text("correct error"))), */ + S.test("Escrow created succesfully (unminted NFT)", + switch(escrow_new_block) { + case(#ok(info)) { + if (info.balance == 100_000_000 and info.receipt.buyer == #principal(a_principal) ) { "correct escrow data" } + else { "wrong escrow data: " # debug_show(escrow_new_block) }; }; + case(_) { "escrow should have passed: " # debug_show(escrow_new_block) };}, + M.equals(T.text("correct escrow data"))), + S.test("Instant transfer with no bid", + switch(instant_transfer_no_bid_minted) { + case(#err(err)) { + if (err.error == #existing_sale_found) { "correct error" } + else { "wrong error: " # debug_show(err.error)}; }; + case(_) { "nft should not have been transferred: " # debug_show(instant_transfer_no_bid_minted) };}, + M.equals(T.text("correct error"))), + S.test("Instant transfer with no bid on unminted item", + switch(instant_transfer_no_bid_unminted) { + case(#err(err)) { + if (err.error == #no_escrow_found) { 
"correct error" } + else { "wrong error: " # debug_show(err.error)}; }; + case(_) { "nft should not have been transferred: " # debug_show(instant_transfer_no_bid_unminted) };}, + M.equals(T.text("correct error"))), + S.test("Able to place a valid bid for minted NFT", + switch(valid_bid_minted) { + case(#ok(info)) { + if (info.token_id == "first" and + (switch (info.txn_type) { + case(#auction_bid(content)) { + if (content.amount == 100_000_000 and content.buyer == #principal(a_principal) and content.sale_id == sales_id_minted) {true} + else {false}; }; + case(_) {false}; + })) + { "correct bid data" } + else { "wrong bid data: " # debug_show(valid_bid_minted) }; }; + case(_) { "bid should have been placed: " # debug_show(valid_bid_minted) };}, + M.equals(T.text("correct bid data"))), + /* S.test("Placing a bid for unminted item should fail", + switch(valid_bid_unminted) { + case(#err(err)) { + if (err.error == #no_escrow_found) { "correct error" } //shoulnt be able to find the escrow because ids wont match + else { + //D.print(debug_show(err)); + "wrong error: " # debug_show(err.error)}; }; + case(_) { "escrow should not have passed: " # debug_show(valid_bid_unminted) };}, + M.equals(T.text("correct error"))), */ + S.test("Instant transfer while auction is still open (minted NFT)", + switch(transfer_while_auction_minted) { + case(#err(err)) { + if (err.error == #existing_sale_found) { "correct error" } + else { "wrong error: " # debug_show(err)}; }; + case(_) { "nft should not have been transferred: " # debug_show(transfer_while_auction_minted) };}, + M.equals(T.text("correct error"))), + S.test("Instant transfer while auction is still open (unminted NFT)", + switch(transfer_while_auction_unminted) { + case(#err(err)) { + if (err.error == #unauthorized_access) { "correct error" } + else { "wrong error: " # debug_show(err.error)}; }; + case(_) { "nft should not have been transferred: " # debug_show(transfer_while_auction_unminted) };}, + M.equals(T.text("correct error"))), + S.test("Auction for first item ended succesfully", + switch(end_auction_minted) { + case(#ok(info)) { + switch(info){ + case(#end_sale(info)){ + if ( info.token_id == "first" and + (switch (info.txn_type) { + case(#sale_ended(content)) { + if (content.amount == 100_000_000 and content.buyer == #principal(a_principal) and content.sale_id == ?sales_id_minted) {true} + else {false}; }; + case(_) {false}; + })) { "auction ended correctly" } + else { "wrong auction data: " # debug_show(end_auction_minted) }; + }; + case(_) { "auction should have closed: " # debug_show(end_auction_minted) }; + }; + }; + case(#err(err)) { "auction should have closed: " # debug_show(err) }; + } + , + M.equals(T.text("auction ended correctly"))), + /* S.test("Auction for second item ended succesfully", + switch(end_auction_unminted) { + case(#ok(info)) { + if ( info.token_id == "second" and + (switch (info.txn_type) { + case(#sale_ended(content)) { + if (content.amount == 0 and content.extensible == #Text("no bids")) {true} + else {false}; }; + case(_) {false}; + })) { "auction ended correctly" } + else { "wrong auction data: " # debug_show(end_auction_unminted) }; }; + case(_) { "auction should have closed: " # debug_show(end_auction_unminted) };}, + M.equals(T.text("auction ended correctly"))),*/ + S.test("Instant transfer with the wrong escrow on deposit", + switch(transfer_wrong_nft) { + case(#err(err)) { + if (err.error == #unauthorized_access) { "correct error" } + else { "wrong error: " # debug_show(err.error)}; }; + case(_) { "nft should 
not have been transferred: " # debug_show(transfer_wrong_nft) };}, + M.equals(T.text("correct error"))), + S.test("Instant transfer with the wrong escrow on deposit", + switch(unminted_instant_transfer) { + case(#ok(info)) { + if (info.token_id == "second") { "transfer succesful" } + else { "wrong metadata: " # debug_show(info)}; }; + case(_) { "nft should have been transferred: " # debug_show(unminted_instant_transfer) };}, + M.equals(T.text("transfer succesful"))), + S.test("minted NFT should be owned by a_wallet", + switch(first_nft_metadata) + {case(#ok(res)){ + if(Types.account_eq(switch(Metadata.get_nft_owner(res.metadata)){ + case(#err(err)){#account_id("invalid")}; + case(#ok(val)){val}; + }, #principal(a_principal) ) == true){ + "was transfered" + } else { + D.print("awallet wrong transfer"); + D.print(debug_show((a_principal, first_nft_metadata))); + "was not transfered"}}; + case(#err(err)) {"unexpected error: " # err.flag_point};}, + M.equals(T.text("was transfered"))), + /* S.test("unminted NFT should be owned by a_wallet", + switch(second_nft_metadata) + {case(#ok(res)){ + if(Types.account_eq(switch(Metadata.get_nft_owner(res.metadata)){ + case(#err(err)){#account_id("invalid")}; + case(#ok(val)){val}; + }, #principal(a_principal) ) == true){ + "was transfered" + } else { + //D.print("awallet"); + //D.print(debug_show(a_principal)); + "was not transfered"}}; + case(#err(err)) {"unexpected error: " # err.flag_point};}, + M.equals(T.text("was transfered"))), */ + ]); + + S.run(suite); + + return #success; + }; + + public shared func testSoulbound() : async {#success; #fail : Text} { + + let this_principal = Principal.fromActor(this); + + + let dfx : DFXTypes.Service = actor(Principal.toText(dfx_ledger)); + let ledger_principal = dfx_ledger; + + + //create and fund wallet + let a_wallet = await TestWalletDef.test_wallet(); let a_principal = Principal.fromActor(a_wallet); + + let fund_a_wallet = await dfx.transfer({ + to = Blob.fromArray(AccountIdentifier.addHash(AccountIdentifier.fromPrincipal(Principal.fromActor(a_wallet), null))); + fee = {e8s = 200_000}; + memo = 1; + from_subaccount = null; + created_at_time = null; + amount = {e8s = 100 * 10 ** 8};}); + D.print("funding result end"); + D.print(debug_show(fund_a_wallet)); + + //create canister + let newPrincipal = await g_canister_factory.create({ + owner = Principal.fromActor(this); + storage_space = null; + }); + let canister : Types.Service = actor(Principal.toText(newPrincipal)); + let canister_principal = Principal.fromActor(canister); + + //stage unminted and minted NFTs + let stage_soulbound_nft = await utils.buildStandardNFT("soulbound", canister, this_principal, 1024, true); + + let mint_nft = await canister.mint_nft_origyn("soulbound", #principal(this_principal)); + + //create an escrow by sending tokens to the ledger + let send_tokens_to_canister = await a_wallet.send_ledger_payment(ledger_principal, (1 * 10 ** 8) + 200000, canister_principal); + + //retreive block information + let block = switch(send_tokens_to_canister){ + case(#ok(ablock)){ + ablock; + }; + case(#err(other)){ + D.print("ledger didnt work"); + return #fail("ledger didnt work"); + }; + }; + + //reset time to time now + let set_time = await canister.__advance_time(get_time()); + + //Attempt to start the auction for soubound NFT + let start_auction_soulbound = await canister.market_transfer_nft_origyn({token_id = "soulbound"; + sales_config = { + escrow_receipt = null; + broker_id = null; + pricing = #auction{ + reserve = ?(1 * 10 ** 8); + token = 
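+ // settlement token for the attempted auction: the local test ledger (8 decimals, fee 200000)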
#ic({ + canister = ledger_principal; + standard = #Ledger; + decimals = 8; + symbol = "LDG"; + fee = 200000; + }); + buy_now = ?(500 * 10 ** 8);//nyi + start_price = (1 * 10 ** 8); + start_date = 0; + ending = #date(get_time()); + min_increase = #amount(10*10**8); + allow_list = null; + }; }; } ); + + //Attempt to transfer unminted NFT after auction + let soulbound_instant_transfer = await canister.market_transfer_nft_origyn({ + token_id = "soulbound"; + sales_config = + { + escrow_receipt = ?{ + seller = #principal(canister_principal); + buyer = #principal(a_principal); + token_id = ""; + token = #ic({ + canister = ledger_principal; + standard = #Ledger; + decimals = 8; + symbol = "LDG"; + fee = 200000; + }); + amount = 100_000_000; + }; + pricing = #instant; + broker_id = null; + }; + }); + + //Attempt to do an owner transfer + let soulbound_owner_transfer = await a_wallet.try_owner_transfer(canister_principal, "soulbound", #principal(canister_principal)); + + + //Helpful Debug Output + /* D.print( "\n" # + "fund_a_wallet: " # debug_show(fund_a_wallet) # "\n\n" # + "stage_soulbound_nft: " # debug_show(stage_soulbound_nft) # "\n\n" # + "mint_nft: " # debug_show(mint_nft) # "\n\n" # + "send_tokens_to_canister: " # debug_show(send_tokens_to_canister) # "\n\n" # + "block: " # debug_show(block) # "\n\n" # + "set_time: " # debug_show(set_time) # "\n\n" # + "start_auction_soulbound: " # debug_show(start_auction_soulbound) # "\n\n" # + "soulbound_instant_transfer: " # debug_show(soulbound_instant_transfer) # "\n\n" # + "soulbound_owner_transfer: " # debug_show(soulbound_owner_transfer) # "\n\n" + ); */ + + let suite = S.suite("test soulbound NFT", [ + S.test("NFT staged succesfully", + switch(stage_soulbound_nft) { + case((#ok("soulbound"), #ok(_), #ok(_), #ok(_))) { "staging succesful" }; + case(_) { "wrong action: " # debug_show(stage_soulbound_nft) };}, + M.equals(T.text("staging succesful"))), + S.test("NFT minted succesfully", + switch(mint_nft) { + case(#ok("soulbound")) { "mint succesful" }; + case(_) { "wrong action: " # debug_show(mint_nft) };}, + M.equals(T.text("mint succesful"))), + S.test("Tokens sent to canister", + switch(send_tokens_to_canister) { + case(#ok(_)) { "transfer succesful" }; + case(_) { "wrong action: " # debug_show(send_tokens_to_canister) };}, + M.equals(T.text("transfer succesful"))), + S.test("Should fail to start auction on soulbound token", + switch(start_auction_soulbound) { + case(#err(err)) { + if (err.error == #token_non_transferable) { "correct error" } + else { "wrong error: " # debug_show(err.error)}; }; + case(_) { "escrow should not have passed: " # debug_show(start_auction_soulbound) };}, + M.equals(T.text("correct error"))), + S.test("Instant transfer should fail for soulbound token", + switch(soulbound_instant_transfer) { + case(#err(err)) { + if (err.error == #token_non_transferable) { "correct error" } + else { "wrong error: " # debug_show(err.error)}; }; + case(_) { "escrow should not have passed: " # debug_show(soulbound_instant_transfer) };}, + M.equals(T.text("correct error"))), + S.test("Owner transfer should fail for soulbound token", + switch(soulbound_owner_transfer) { + case(#err(err)) { + if (err.error == #token_non_transferable) { "correct error" } + else { "wrong error: " # debug_show(err.error)}; }; + case(_) { "escrow should not have passed: " # debug_show(soulbound_owner_transfer) };}, + M.equals(T.text("correct error"))), + ]); + + S.run(suite); + + return #success; + }; + +} \ No newline at end of file diff --git 
a/src/tests/test_runner_nft.mo b/src/tests/test_runner_nft.mo new file mode 100644 index 0000000..a52480b --- /dev/null +++ b/src/tests/test_runner_nft.mo @@ -0,0 +1,3067 @@ +import AccountIdentifier "mo:principalmo/AccountIdentifier"; +import C "mo:matchers/Canister"; +import Conversion "mo:candy_0_1_10/conversion"; +import DFXTypes "../origyn_nft_reference/dfxtypes"; +import D "mo:base/Debug"; +import Blob "mo:base/Blob"; +import Int "mo:base/Int"; +import M "mo:matchers/Matchers"; +import NFTUtils "../origyn_nft_reference/utils"; +import Metadata "../origyn_nft_reference/metadata"; +import Nat64 "mo:base/Nat64"; +import Option "mo:base/Option"; +import Principal "mo:base/Principal"; +import Properties "mo:candy_0_1_10/properties"; +import Result "mo:base/Result"; +import Nat "mo:base/Nat"; +import S "mo:matchers/Suite"; +import T "mo:matchers/Testable"; +import TestWalletDef "test_wallet"; +import Time "mo:base/Time"; +import Types "../origyn_nft_reference/types"; +import utils "test_utils"; +//import Instant "test_runner_instant_transfer"; + +// ttps://m7sm4-2iaaa-aaaab-qabra-cai.raw.ic0.app/?tag=1526457217 will provide a facility to convert +// a account hash to an account id +// ie dfx canister --network ic call mexqz-aqaaa-aaaab-qabtq-cai say '(principal "r7inp-6aaaa-aaaaa-aaabq-cai", blob "20\8F\6F\7F\9B\0D\B3\29\36\AA\8B\F4\78\38\E8\B8\15\37\30\F7\3D\03\99\EA\BB\68\98\11\08\64\90\61")' + + +shared (deployer) actor class test_runner(dfx_ledger: Principal, dfx_ledger2: Principal) = this { + + let debug_channel = { + throws = true; + withdraw_detail = true; + }; + + D.print("have ledger values are " # debug_show(dfx_ledger,dfx_ledger2)); + + let dfx : DFXTypes.Service = actor(Principal.toText(dfx_ledger)); + + let dfx2 : DFXTypes.Service = actor(Principal.toText(dfx_ledger2)); + + + private type canister_factory = actor { + create : (Principal) -> async Principal; + }; + + let it = C.Tester({ batchSize = 8 }); + + + private var DAY_LENGTH = 60 * 60 * 24 * 10 ** 9; + private var dip20_fee = 200_000; + + private func get_time() : Int{ + return Time.now(); + }; + + private type canister_factory_actor = actor { + create : ({owner: Principal; storage_space: ?Nat}) -> async Principal; + }; + private type storage_factory_actor = actor { + create : ({owner: Principal; storage_space: ?Nat}) -> async Principal; + }; + + private var g_canister_factory : canister_factory_actor = actor(Principal.toText(Principal.fromBlob("\04"))); + private var g_storage_factory: storage_factory_actor = actor(Principal.toText(Principal.fromBlob("\04"))); + + + + public shared func test(canister_factory : Principal, storage_factory: Principal) : async {#success; #fail : Text} { + + //let Instant_Test = await Instant.test_runner_instant_transfer(); + + g_canister_factory := actor(Principal.toText(canister_factory)); + g_storage_factory := actor(Principal.toText(storage_factory)); + + let suite = S.suite("test nft", [ + S.test("testDeposits", switch(await testDeposit()){case(#success){true};case(_){false};}, M.equals(T.bool(true))), + S.test("testAuction", switch(await testAuction()){case(#success){true};case(_){false};}, M.equals(T.bool(true))), + S.test("testStandardLedger", switch(await testStandardLedger()){case(#success){true};case(_){false};}, M.equals(T.bool(true))), + S.test("testMarketTransfer", switch(await testMarketTransfer()){case(#success){true};case(_){false};}, M.equals(T.bool(true))), + S.test("testOwnerTransfer", switch(await testOwnerTransfer()){case(#success){true};case(_){false};}, 
M.equals(T.bool(true))), + S.test("testOffer", switch(await testOffers()){case(#success){true};case(_){false};}, M.equals(T.bool(true))), + S.test("testRoyalties", switch(await testRoyalties()){case(#success){true};case(_){false};}, M.equals(T.bool(true))), + + ]); + S.run(suite); + + return #success; + }; + + public shared func testDeposit() : async {#success; #fail : Text} { + D.print("running testDeposit"); + + + D.print("making wallets"); + + let a_wallet = await TestWalletDef.test_wallet(); + let b_wallet = await TestWalletDef.test_wallet(); + + let newPrincipal = await g_canister_factory.create({ + owner = Principal.fromActor(this); + storage_space = null; + }); + + let canister : Types.Service = actor(Principal.toText(newPrincipal)); + + let timeset = await canister.__set_time_mode(#test); + let startTime = Time.now(); + let atime = await canister.__advance_time(startTime); + + D.print("calling stage"); + + let standardStage = await utils.buildStandardNFT("1", canister, Principal.fromActor(canister), 1024, false); + let standardStage2 = await utils.buildStandardNFT("2", canister, Principal.fromActor(canister), 1024, false); + let standardStage3 = await utils.buildStandardNFT("3", canister, Principal.fromActor(canister), 1024, false); + + //mint 2 + let mint_attempt = await canister.mint_nft_origyn("2", #principal(Principal.fromActor(this))); + let mint_attempt2 = await canister.mint_nft_origyn("3", #principal(Principal.fromActor(this))); + + D.print("starting sale"); + let sale_start = await canister.market_transfer_nft_origyn({token_id = "2"; + sales_config = { + escrow_receipt = null; + broker_id = null; + pricing = #auction{ + reserve = null; + token = #ic({ + canister = dfx_ledger; + standard = #Ledger; + decimals = 8; + symbol = "OGY"; + fee = 200000; + }); + buy_now = ?(500 * 10 ** 8);//nyi + start_price = (100 * 10 ** 8); + start_date = 0; + ending = #date(startTime + DAY_LENGTH); + min_increase = #amount(10*10**8); + allow_list = null; + }; + }; } ); + + + + + + D.print("funding"); + //funding + D.print("funding"); + //funding + let funding_result = await dfx.transfer({ + to = Blob.fromArray(AccountIdentifier.addHash(AccountIdentifier.fromPrincipal(Principal.fromActor(a_wallet), null))); + fee = {e8s = 200_000}; + memo = 1; + from_subaccount = null; + created_at_time = ?{timestamp_nanos = Nat64.fromNat(Int.abs(Time.now()))}; + amount = {e8s = 100 * 10 ** 8};}); + + D.print("funding result " # debug_show(funding_result)); + let funding_result2 = await dfx.transfer({ + to = Blob.fromArray(AccountIdentifier.addHash(AccountIdentifier.fromPrincipal(Principal.fromActor(b_wallet), null))); + fee = {e8s = 200_000}; + memo = 1; + from_subaccount = null; + created_at_time = ?{timestamp_nanos = Nat64.fromNat(Int.abs(Time.now()))}; + amount = {e8s = 100 * 10 ** 8};}); + + + D.print("funding result 2 " # debug_show(funding_result2)); + + let a_wallet_send_tokens_to_canister = await a_wallet.send_ledger_payment(Principal.fromActor(dfx), (4 * 10 ** 8) + 800000, Principal.fromActor(canister)); + //let a_wallet_send_tokens_to_b = await a_wallet.send_ledger_payment(Principal.fromActor(dfx), 1 * 10 ** 8, Principal.fromActor(canister)); + + let b_wallet_send_tokens_to_canister = await b_wallet.send_ledger_payment(Principal.fromActor(dfx), (2 * 10 ** 8) + 400000, Principal.fromActor(canister)); + //let b_wallet_send_tokens_to_canister2 = await a_wallet.send_ledger_payment(Principal.fromActor(dfx), 1 * 10 ** 8, Principal.fromActor(canister)); + + D.print("Done funding"); + //send an escrow locked 
until a certain lock time + + //escrow for a general nft with to owner of nft + let lockedEscrow_specific_no_sale = await a_wallet.try_escrow_general_staged(Principal.fromActor(canister), Principal.fromActor(canister), Principal.fromActor(dfx), null, 1 * 10 ** 8, null, ?(startTime + DAY_LENGTH)); + debug{ if(debug_channel.withdraw_detail){D.print("lockedEscrow_specific_no_sale" # debug_show(lockedEscrow_specific_no_sale))}}; + + + //escrow for a general nft with no nfts + let lockedEscrow_specific_sale = await a_wallet.try_escrow_general_staged(Principal.fromActor(b_wallet), Principal.fromActor(canister), Principal.fromActor(dfx), null, 1 * 10 ** 8, null, ?(startTime + DAY_LENGTH)); + debug{ if(debug_channel.withdraw_detail){D.print("lockedEscrow_specific_sale" # debug_show(lockedEscrow_specific_sale))}}; + + + //escrow for a specific nft with no sale running + let lockedEscrow_general_no_sale = await a_wallet.try_escrow_specific_staged(Principal.fromActor(this), Principal.fromActor(canister), Principal.fromActor(dfx),null , 1 * 10 ** 8, "3", null, null, ?(startTime + DAY_LENGTH)); + debug{ if(debug_channel.withdraw_detail){D.print("lockedEscrow_general_no_sale" # debug_show(lockedEscrow_general_no_sale))}}; + + + //escrow for a specific nft with sale running + let lockedEscrow_general_sale = await a_wallet.try_escrow_specific_staged(Principal.fromActor(this), Principal.fromActor(canister), Principal.fromActor(dfx),null, 1 * 10 ** 8, "2", null, null, ?(startTime + DAY_LENGTH)); + debug{ if(debug_channel.withdraw_detail){D.print("lockedEscrow_general_sale" # debug_show(lockedEscrow_general_sale))}}; + + + let balances = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(a_wallet))); + + D.print("did escrows work" # debug_show( + lockedEscrow_specific_no_sale, + lockedEscrow_specific_sale, + lockedEscrow_general_no_sale, + lockedEscrow_general_sale, + balances)); + + //try to withdraw + + D.print("trying withdrawls"); + + let withdraw_before_lock_1 = await a_wallet.try_escrow_withdraw( + Principal.fromActor(canister), + Principal.fromActor(a_wallet), + Principal.fromActor(dfx), + Principal.fromActor(canister), + "", + (1 * 10 ** 8) , + null + ); + + debug{ if(debug_channel.withdraw_detail){D.print("withdraw_before_lock_1" # debug_show(withdraw_before_lock_1))}}; + + let withdraw_before_lock_2 = await a_wallet.try_escrow_withdraw( + Principal.fromActor(canister), + Principal.fromActor(a_wallet), + Principal.fromActor(dfx), + Principal.fromActor(b_wallet), + "", + (1 * 10 ** 8) , + null + ); + + debug{ if(debug_channel.withdraw_detail){D.print("withdraw_before_lock_2" # debug_show(withdraw_before_lock_2))}}; + + + let withdraw_before_lock_3 = await a_wallet.try_escrow_withdraw( + Principal.fromActor(canister), + Principal.fromActor(a_wallet), + Principal.fromActor(dfx), + Principal.fromActor(this), + "2", + (1 * 10 ** 8) , + null + ); + + debug{ if(debug_channel.withdraw_detail){D.print("withdraw_before_lock_3" # debug_show(withdraw_before_lock_3))}}; + + + let withdraw_before_lock_4 = await a_wallet.try_escrow_withdraw( + Principal.fromActor(canister), + Principal.fromActor(a_wallet), + Principal.fromActor(dfx), + Principal.fromActor(this), + "3", + (1 * 10 ** 8) , + null + ); + + debug{ if(debug_channel.withdraw_detail){D.print("withdraw_before_lock_4" # debug_show(withdraw_before_lock_4))}}; + + + /* D.print("first withdraw results" # debug_show( + withdraw_before_lock_1, + withdraw_before_lock_2, + withdraw_before_lock_3, + withdraw_before_lock_4)); */ + + + + + let atime2 
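+ // move the mock clock just past the lock expiry (startTime + DAY_LENGTH) so the sale can be ended and the locked escrows become withdrawable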
= await canister.__advance_time(startTime + DAY_LENGTH + 1); + + let end_sale = await canister.sale_nft_origyn(#end_sale("2")); + + //try withdraws again + + let withdraw_after_lock_1 = await a_wallet.try_escrow_withdraw( + Principal.fromActor(canister), + Principal.fromActor(a_wallet), + Principal.fromActor(dfx), + Principal.fromActor(canister), + "", + (1 * 10 ** 8) , + null + ); + debug{ if(debug_channel.withdraw_detail){D.print("withdraw_after_lock_1" # debug_show(withdraw_after_lock_1))}}; + + + let withdraw_after_lock_2 = await a_wallet.try_escrow_withdraw( + Principal.fromActor(canister), + Principal.fromActor(a_wallet), + Principal.fromActor(dfx), + Principal.fromActor(b_wallet), + "", + (1 * 10 ** 8) , + null + ); + + debug{ if(debug_channel.withdraw_detail){D.print("withdraw_after_lock_2" # debug_show(withdraw_after_lock_2))}}; + + + let withdraw_after_lock_3 = await a_wallet.try_escrow_withdraw( + Principal.fromActor(canister), + Principal.fromActor(a_wallet), + Principal.fromActor(dfx), + Principal.fromActor(this), + "2", + (1 * 10 ** 8) , + null + ); + + debug{ if(debug_channel.withdraw_detail){D.print("withdraw_after_lock_3" # debug_show(withdraw_after_lock_3))}}; + + + let withdraw_after_lock_4 = await a_wallet.try_escrow_withdraw( + Principal.fromActor(canister), + Principal.fromActor(a_wallet), + Principal.fromActor(dfx), + Principal.fromActor(this), + "3", + (1 * 10 ** 8) , + null + ); + + debug{ if(debug_channel.withdraw_detail){D.print("withdraw_after_lock_4" # debug_show(withdraw_after_lock_4))}}; + + + /* D.print("second withdraw results" # debug_show( + withdraw_after_lock_1, + withdraw_after_lock_2, + withdraw_after_lock_3, + withdraw_after_lock_4)); */ + + //test balances + + let to = AccountIdentifier.addHash(AccountIdentifier.fromPrincipal(Principal.fromActor(a_wallet), null)); + + let a_wallet_balance = await dfx.account_balance({account= Blob.fromArray(to)}); + + let suite = S.suite("test locked deposit", [ + + S.test("fail if witdraw locked for general owner", switch(withdraw_before_lock_1){case(#ok(res)){"unexpected success" # debug_show(res)};case(#err(err)){ + if(err.number == 3008){ //since the requestor isnt the owner and this isnt minted we wont reveal it is a real token + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), //NFT-228 + S.test("fail if witdraw locked for non owner", switch(withdraw_before_lock_2){case(#ok(res)){"unexpected success" # debug_show(res)};case(#err(err)){ + if(err.number == 3008){ //since the requestor isnt the owner and this isnt minted we wont reveal it is a real token + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), //NFT-228 + S.test("fail if witdraw locked for specific with sale", switch(withdraw_before_lock_3){case(#ok(res)){"unexpected success" # debug_show(res)};case(#err(err)){ + if(err.number == 3008){ + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), //NFT-228 + S.test("fail if witdraw locked for specific with no sale", switch(withdraw_before_lock_4){case(#ok(res)){"unexpected success" # debug_show(res)};case(#err(err)){ + if(err.number == 3008){ //since the requestor isnt the owner and this isnt minted we wont reveal it is a real token + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), //NFT-228 + S.test("pass withdraw for general owner if past date", 
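+ // with the lock expired and the sale for "2" ended, all four withdraw_after_lock_* calls above are expected to succeed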
switch(withdraw_after_lock_1){case(#ok(res)){"expected success"};case(#err(err)){ + + "wrong error " # debug_show(err); + };}, M.equals(T.text("expected success"))), //NFT-228 + S.test("pass withdraw for general non-owner if past date", switch(withdraw_after_lock_2){case(#ok(res)){"expected success"};case(#err(err)){ + + "wrong error " # debug_show(err); + };}, M.equals(T.text("expected success"))), //NFT-228 + S.test("pass withdraw for specific if sale over", switch(withdraw_after_lock_3){case(#ok(res)){"expected success"};case(#err(err)){ + + "wrong error " # debug_show(err); + };}, M.equals(T.text("expected success"))), //NFT-228 + S.test("pass withdraw for specific if no sale", switch(withdraw_after_lock_4){case(#ok(res)){"expected success"};case(#err(err)){ + + "wrong error " # debug_show(err); + };}, M.equals(T.text("expected success"))), //NFT-228 + + + ]); + + S.run(suite); + + return #success; + }; + + + public shared func testMarketTransfer() : async {#success; #fail : Text} { + D.print("running testMarketTransfer"); + + + + D.print("making wallets"); + + let a_wallet = await TestWalletDef.test_wallet(); + + D.print("making factory"); + + let newPrincipal = await g_canister_factory.create({ + owner = Principal.fromActor(this); + storage_space = null; + }); + + D.print("have canister"); + + let canister : Types.Service = actor(Principal.toText(newPrincipal)); + + D.print("calling stage"); + + let standardStage = await utils.buildStandardNFT("1", canister, Principal.fromActor(canister), 1024, false); + let standardStage2 = await utils.buildStandardNFT("2", canister, Principal.fromActor(canister), 1024, false); + let standardStage3 = await utils.buildStandardNFT("3", canister, Principal.fromActor(canister), 1024, false); + + D.print("finished stage"); + D.print(debug_show(standardStage.0)); + + //MKT0015 try the sale before there is an escrow + let blind_market_fail = await canister.market_transfer_nft_origyn({ + token_id = "1"; + sales_config = + { + escrow_receipt = ?{ + seller = #principal(Principal.fromActor(canister)); + buyer = #principal(Principal.fromActor(a_wallet)); + token_id = "1"; + token = #ic({ + canister = Principal.fromActor(dfx); + standard = #Ledger; + decimals = 8; + symbol = "LDG"; + fee = 200000; + }); + amount = 100_000_000; + }; + pricing = #instant; + broker_id = null; + }; + + }); + + D.print("blind market fail"); + D.print(debug_show(blind_market_fail)); + + + //MKT0008 Should fail + D.print("calling try_sale_staged"); + let a_wallet_try_staged_market = await a_wallet.try_sale_staged(Principal.fromActor(this), Principal.fromActor(canister), Principal.fromActor(dfx)); + + D.print(debug_show(a_wallet_try_staged_market)); + + + D.print("calling try_escrow_specific_staged"); + //ESC0003. try to escrow for the specific item; should fail + let a_wallet_try_escrow_specific_staged = await a_wallet.try_escrow_specific_staged(Principal.fromActor(canister), Principal.fromActor(canister), Principal.fromActor(dfx), ?1, 1 * 10 ** 8, "1", null, null, null); + + D.print(debug_show(a_wallet_try_escrow_specific_staged)); + + //ESC0002. 
try to escrow for the canister; should succeed + //fund a_wallet + let funding_result = await dfx.transfer({ + to = Blob.fromArray(AccountIdentifier.addHash(AccountIdentifier.fromPrincipal(Principal.fromActor(a_wallet), null))); + fee = {e8s = 200_000}; + memo = 1; + from_subaccount = null; + created_at_time = null; + amount = {e8s = 1000 * 10 ** 8};}); + D.print("funding result"); + D.print(debug_show(funding_result)); + + //sent an escrow for a dip20 deposit that doesn't exist + D.print("sending an escrow with no deposit"); + let a_wallet_try_escrow_general_fake = await a_wallet.try_escrow_general_staged(Principal.fromActor(canister), Principal.fromActor(canister), Principal.fromActor(dfx), ?34, 1 * 10 ** 8, null, null); + + + //send a payment to the ledger + D.print("sending tokens to canisters"); + let a_wallet_send_tokens_to_canister = await a_wallet.send_ledger_payment(Principal.fromActor(dfx), (1 * 10 ** 8) + 200000, Principal.fromActor(canister)); + + D.print("send to canister"); + D.print(debug_show(a_wallet_send_tokens_to_canister)); + + let block = switch(a_wallet_send_tokens_to_canister){ + case(#ok(ablock)){ + ablock; + }; + case(#err(other)){ + D.print("ledger didnt work"); + return #fail("ledger didnt work"); + }; + }; + + //sent an escrow for a ledger deposit that doesn't exist + let a_wallet_try_escrow_general_fake_amount = await a_wallet.try_escrow_general_staged(Principal.fromActor(canister), Principal.fromActor(canister), Principal.fromActor(dfx), null, 2 * 10 ** 8, null, null); + + ////ESC0001 + + D.print("Sending real escrow now"); + let a_wallet_try_escrow_general_staged = await a_wallet.try_escrow_general_staged(Principal.fromActor(canister), Principal.fromActor(canister), Principal.fromActor(dfx), null, 1 * 10 ** 8, null, null); + + D.print("try escrow genreal stage"); + D.print(debug_show(a_wallet_try_escrow_general_staged)); + + //ESC0005 should fail if you try to calim a deposit a second time + let a_wallet_try_escrow_general_staged_retry = await a_wallet.try_escrow_general_staged(Principal.fromActor(canister), Principal.fromActor(canister), Principal.fromActor(dfx), null, 1 * 10 ** 8, null, null); + + //check balance and make sure we see the escrow BAL0002 + let a_balance = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(a_wallet))); + + D.print("thebalance"); + D.print(debug_show(a_balance)); + + //MKT0007, MKT0014 + D.print("blind market"); + let blind_market = await canister.market_transfer_nft_origyn({ + token_id = "1"; + sales_config = + { + escrow_receipt = ?{ + seller = #principal(Principal.fromActor(canister)); + buyer = #principal(Principal.fromActor(a_wallet)); + token_id = ""; + token = #ic({ + canister = Principal.fromActor(dfx); + standard = #Ledger; + decimals = 8; + symbol = "LDG"; + fee = 200000; + }); + amount = 100_000_000; + }; + pricing = #instant; + broker_id = null; + }; + + }); + + D.print(debug_show(blind_market)); + + //MKT0014 todo: check the transaction record and confirm the gensis reocrd + + //BAL0005 + let a_balance2 = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(a_wallet))); + + //BAL0003 + let canister_balance = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(canister))); + + //MKT0013, MKT0011 this item should be minted now + let test_metadata = await canister.nft_origyn("1"); + D.print("This thing should have been minted"); + D.print(debug_show(test_metadata)); + switch(test_metadata){ + case(#ok(val)){ + D.print(debug_show(Metadata.is_minted(val.metadata))); + }; + 
case(_){}; + }; + + //MINT0026 shold fail because the purchase of a staged item should mint it + let mint_attempt = await canister.mint_nft_origyn("1", #principal(Principal.fromActor(a_wallet))); + D.print("This thing should have not been minted"); + D.print(debug_show(mint_attempt)); + + //ESC0009 + let blind_market2 = await canister.market_transfer_nft_origyn({ + token_id = "2"; + sales_config = + { + escrow_receipt = ?{ + seller = #principal(Principal.fromActor(canister)); + buyer = #principal(Principal.fromActor(a_wallet)); + token_id = ""; + token = #ic({ + canister = Principal.fromActor(dfx); + standard = #Ledger; + decimals = 8; + symbol = "LDG"; + fee = 200000; + }); + amount = 100_000_000; + }; + pricing = #instant; + broker_id = null; + }; + + }); + + D.print("This thing should have not been minted either"); + D.print(debug_show(blind_market2)); + + //mint the third item to test a specific sale + let mint_attempt3 = await canister.mint_nft_origyn("3", #principal(Principal.fromActor(this))); + + D.print("mint attempt 3"); + D.print(debug_show(mint_attempt3)); + + //creae an new wallet for testing + let b_wallet = await TestWalletDef.test_wallet(); + + //give b_wallet some tokens + let b_funding_result =await dfx.transfer({ + to = Blob.fromArray(AccountIdentifier.addHash(AccountIdentifier.fromPrincipal(Principal.fromActor(b_wallet), null))); + fee = {e8s = 200_000}; + memo = 1; + from_subaccount = null; + created_at_time = null; + amount = {e8s = 1000 * 10 ** 8};}); + D.print("funding result"); + D.print(debug_show(funding_result)); + + //send a payment to the the new owner(this actor- after mint) + D.print("sending tokens to canisters"); + + let b_wallet_send_tokens_to_canister = await b_wallet.send_ledger_payment(Principal.fromActor(dfx), (1 * 10 ** 8) + 200000, Principal.fromActor(canister)); + + D.print("send to canister"); + D.print(debug_show(b_wallet_send_tokens_to_canister)); + + let b_block = switch(b_wallet_send_tokens_to_canister){ + case(#ok(ablock)){ + ablock; + }; + case(#err(other)){ + D.print("ledger didnt work"); + return #fail("ledger didnt work"); + }; + }; + + //make sure a user can't escrow for an owner that doesn't own NFT + let b_wallet_try_escrow_wrong_owner = await b_wallet.try_escrow_specific_staged(Principal.fromActor(a_wallet), Principal.fromActor(canister), Principal.fromActor(dfx), null, 1 * 10 ** 8, "3", null, null, null); + D.print("b_wallet_try_escrow_wrong_owner: " # debug_show(b_wallet_try_escrow_wrong_owner)); + + //ESC0002 + D.print("Sending real escrow now"); + let b_wallet_try_escrow_specific_staged = await b_wallet.try_escrow_specific_staged(Principal.fromActor(this), Principal.fromActor(canister), Principal.fromActor(dfx), null, 1 * 10 ** 8, "3", null, null, null); + + // + D.print("try escrow specific stage"); + D.print(debug_show(b_wallet_try_escrow_specific_staged)); + + + //MKT0010 + D.print("apecific market"); + let specific_market = await canister.market_transfer_nft_origyn({ + token_id = "3"; + sales_config = + { + escrow_receipt = ?{ + seller = #principal(Principal.fromActor(this)); + buyer = #principal(Principal.fromActor(b_wallet)); + token_id = "3"; + token = #ic({ + canister = Principal.fromActor(dfx); + standard = #Ledger; + decimals = 8; + symbol = "LDG"; + fee = 200000; + }); + amount = 100_000_000; + }; + pricing = #instant; + broker_id = null; + }; + + }); + + D.print(debug_show(specific_market)); + + //test balances + + let suite = S.suite("test market Nft", [ + + S.test("fail if no escrow exists for general staged sale", 
switch(blind_market_fail){case(#ok(res)){"unexpected success"};case(#err(err)){ + if(err.number == 3000){ //since the requestor isnt the owner and this isnt minted we wont reveal it is a real token + "correct number" + } else{ + "wrong error " # debug_show(err.number); + }};}, M.equals(T.text("correct number"))), //MKT0015 + S.test("fail if non owner trys to sell", switch(a_wallet_try_staged_market){case(#ok(res)){"unexpected success"};case(#err(err)){ + if(err.number == 2000){ //since the requestor isnt the owner and this isnt minted we wont reveal it is a real token + "correct number" + } else{ + "wrong error " # debug_show(err.number); + }};}, M.equals(T.text("correct number"))), //MKT0008 + S.test("owner can sell staged NFT - produces sale_id", switch(blind_market){case(#ok(res)){ + D.print("found blind market response"); + D.print(debug_show(res)); + if(res.index == 1){ + "found genesis record id" + } else { + "no sales id " + }};case(#err(err)){"unexpected error: " # err.flag_point # debug_show(err)};}, M.equals(T.text("found genesis record id"))), //MKT0007, MKT0014 + S.test("fail if escrow is double processed", switch(a_wallet_try_escrow_general_staged_retry){case(#ok(res)){"unexpected success"};case(#err(err)){ + if(err.number ==3003){ // + "correct number" + } else{ + "wrong error " # debug_show(err.number); + }};}, M.equals(T.text("correct number"))), //ESC0005 + S.test("fail if mint is called on a minted item", switch(mint_attempt){case(#ok(res)){"unexpected success"};case(#err(err)){ + if(err.number == 10){ // + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), //MINT0026 + + S.test("item is minted now", switch(test_metadata){case(#ok(res)){ + if(Metadata.is_minted(res.metadata) == true){ + "was minted" + } else { + "was not minted" + }};case(#err(err)){"unexpected error: " # err.flag_point};}, M.equals(T.text("was minted"))), //MKT0013 + S.test("item is owned by correct owner after minting", switch(test_metadata){case(#ok(res)){ + if(Types.account_eq(switch(Metadata.get_nft_owner(res.metadata)){ + case(#err(err)){#account_id("invalid")}; + case(#ok(val)){D.print(debug_show(val));val}; + }, #principal(Principal.fromActor(a_wallet)) ) == true){ + "was transfered" + } else { + D.print("awallet"); + D.print(debug_show(Principal.fromActor(a_wallet))); + "was not transfered" + }};case(#err(err)){"unexpected error: " # err.flag_point};}, M.equals(T.text("was transfered"))), //MKT0011 + + S.test("fail if escrow already spent", switch(blind_market2){case(#ok(res)){"unexpected success"};case(#err(err)){ + if(err.number ==3000){ // + "correct number" + } else{ + "wrong error " # debug_show(err.number); + }};}, M.equals(T.text("correct number"))), //ESC0009 + S.test("fail if escrowing for a specific item and it is only staged", switch(a_wallet_try_escrow_specific_staged){case(#ok(res)){"unexpected success"};case(#err(err)){ + if(err.number ==4){ // + "correct number" + } else{ + "wrong error " # debug_show(err.number); + }};}, M.equals(T.text("correct number"))), //ESC0003 + S.test("fail if escrowing for a non existant deposit", switch(a_wallet_try_escrow_general_fake){case(#ok(res)){"unexpected success"};case(#err(err)){ + if(err.number == 3003){ // + "correct number" + } else{ + "wrong error " # debug_show(err.number); + }};}, M.equals(T.text("correct number"))), //ESC0006 + S.test("fail if escrowing for an existing deposit but fake amount", switch(a_wallet_try_escrow_general_fake_amount){case(#ok(res)){"unexpected 
success"};case(#err(err)){ + if(err.number == 3003){ // + "correct number" + } else{ + "wrong error " # debug_show(err.number); + }};}, M.equals(T.text("correct number"))), //ESC0011 + S.test("can escrow for general unminted item", switch(a_wallet_try_escrow_general_staged){case(#ok(res)){ + D.print("an amount for escrow"); + D.print(debug_show(res.receipt)); + if(res.receipt.amount == 1*10**8){ + "was escrowed" + } else { + "was not escrowed" + }};case(#err(err)){"unexpected error: " # err.flag_point};}, M.equals(T.text("was escrowed"))), //ESC0002 + + + S.test("escrow deposit transaction", switch(a_wallet_try_escrow_general_staged){case(#ok(res)){ + + switch(res.transaction.txn_type){ + case(#escrow_deposit(details)){ + if(Types.account_eq(details.buyer, #principal(Principal.fromActor(a_wallet))) and + Types.account_eq(details.seller, #principal(Principal.fromActor(canister))) and + details.amount == ((1*10**8)) and + details.token_id == "" and + Types.token_eq(details.token, #ic({ + canister = (Principal.fromActor(dfx)); + standard = #Ledger; + decimals = 8; + symbol = "LDG"; + fee = 200000;}))){ + "correct response"; + } else { + "details didnt match" # debug_show(details); + }; + }; + case(_){ + "bad history sale"; + }; + }};case(#err(err)){"unexpected error: " # err.flag_point};}, M.equals(T.text("correct response"))), //NFT-72 + S.test("can't escrow for wrong NFT owner", switch(b_wallet_try_escrow_wrong_owner){case(#err(err)){ + if(err.number == 3002){ + "correct number" + } else { + "wrong error " # debug_show(err.number); + }};case(#ok(res)){"unexpected success: " # debug_show(res)};}, + M.equals(T.text("correct number"))), + S.test("can escrow for specific item", switch(b_wallet_try_escrow_specific_staged){case(#ok(res)){ + if(res.receipt.amount == 1*10**8){ + "was escrowed" + } else { + "was not escrowed" + }};case(#err(err)){"unexpected error: " # err.flag_point};}, M.equals(T.text("was escrowed"))), //ESC0001 + S.test("owner can sell specific NFT - produces sale_id", switch(specific_market){case(#ok(res)){ + if(res.token_id == "3"){ + "found tx record" + } else { + D.print(debug_show(res)); + "no sales id " + }};case(#err(err)){"unexpected error: " # err.flag_point};}, M.equals(T.text("found tx record"))), //MKT0010 + S.test("escrow balance is shown", switch(a_balance){case(#ok(res)){ + D.print(debug_show(res)); + D.print(debug_show(#principal(Principal.fromActor(canister)))); + D.print(debug_show(#principal(Principal.fromActor(a_wallet)))); + D.print(debug_show(Principal.fromActor(dfx))); + if(Types.account_eq(res.escrow[0].seller, #principal(Principal.fromActor(canister))) and + Types.account_eq(res.escrow[0].buyer, #principal(Principal.fromActor(a_wallet))) and + res.escrow[0].token_id == "" and + Types.token_eq(res.escrow[0].token, #ic({ + canister = Principal.fromActor(dfx); + standard = #Ledger; + decimals = 8; + symbol = "LDG"; + fee = 200000; + })) + ){ + "found escrow record" + } else { + D.print(debug_show(res)); + "didnt find record " + }};case(#err(err)){"unexpected error: " # err.flag_point};}, M.equals(T.text("found escrow record"))), //BAL0001 + S.test("escrow balance is removed", switch(a_balance2){case(#ok(res)){ + D.print(debug_show(res)); + if(res.escrow.size() == 0 ){ + "no escrow record" + } else { + D.print(debug_show(res)); + "found record " + }};case(#err(err)){"unexpected error: " # err.flag_point};}, M.equals(T.text("no escrow record"))), //BAL0005 + S.test("sale balance is shown", switch(a_balance){case(#ok(res)){ + D.print(debug_show(res)); + 
D.print(debug_show(#principal(Principal.fromActor(canister)))); + D.print(debug_show(#principal(Principal.fromActor(a_wallet)))); + D.print(debug_show(Principal.fromActor(dfx))); + if(Types.account_eq(res.escrow[0].buyer, #principal(Principal.fromActor(a_wallet))) and + Types.account_eq(res.escrow[0].seller, #principal(Principal.fromActor(canister))) and + res.escrow[0].token_id == "" and + Types.token_eq(res.escrow[0].token, #ic({ + canister = Principal.fromActor(dfx); + standard = #Ledger; + decimals = 8; + symbol = "LDG"; + fee = 200000; + })) and + res.escrow[0].amount == 1*10**8 + ){ + "found sale record" + } else { + D.print(debug_show(res)); + "didnt find record " + }};case(#err(err)){"unexpected error: " # err.flag_point};}, M.equals(T.text("found sale record"))), //BAL0003 + + ]); + + S.run(suite); + + return #success; + }; + + + public shared func testRoyalties() : async {#success; #fail : Text} { + D.print("running testRoyalties"); + D.print("making wallets"); + + let a_wallet = await TestWalletDef.test_wallet(); //purchaser + let b_wallet = await TestWalletDef.test_wallet(); //broker + let n_wallet = await TestWalletDef.test_wallet(); //node + let o_wallet = await TestWalletDef.test_wallet(); //originator + let net_wallet = await TestWalletDef.test_wallet(); //net + + D.print("making factory"); + + let newPrincipal = await g_canister_factory.create({ + owner = Principal.fromActor(this); + storage_space = null; + }); + + D.print("have canister"); + + let canister : Types.Service = actor(Principal.toText(newPrincipal)); + let standardStage_collection = await utils.buildCollection( + canister, + Principal.fromActor(canister), + Principal.fromActor(n_wallet), + Principal.fromActor(o_wallet), + 2048000); + + let updateNetwork = canister.collection_update_nft_origyn(#UpdateNetwork(?Principal.fromActor(net_wallet))); + + + D.print("calling stage"); + + + let standardStage = await utils.buildStandardNFT("1", canister, Principal.fromActor(canister), 1024, false); + let standardStage2 = await utils.buildStandardNFT("2", canister, Principal.fromActor(canister), 1024, false); + let standardStage3 = await utils.buildStandardNFT("3", canister, Principal.fromActor(canister), 1024, false); + + let mint_attempt3 = await canister.mint_nft_origyn("2", #principal(Principal.fromActor(this))); + let mint_attempt4 = await canister.mint_nft_origyn("3", #principal(Principal.fromActor(this))); + + D.print("finished stage"); + D.print(debug_show(standardStage.0)); + + //fund a_wallet + let funding_result = await dfx.transfer({ + to = Blob.fromArray(AccountIdentifier.addHash(AccountIdentifier.fromPrincipal(Principal.fromActor(a_wallet), null))); + fee = {e8s = 200_000}; + memo = 1; + from_subaccount = null; + created_at_time = null; + amount = {e8s = 1000 * 10 ** 8};}); + + let funding_result2 = await dfx.transfer({ + to = Blob.fromArray(AccountIdentifier.addHash(AccountIdentifier.fromPrincipal(Principal.fromActor(b_wallet), null))); + fee = {e8s = 200_000}; + memo = 1; + from_subaccount = null; + created_at_time = null; + amount = {e8s = 1000 * 10 ** 8};}); + + //send a payment to the ledger + D.print("sending tokens to canisters"); + let a_wallet_send_tokens_to_canister = await a_wallet.send_ledger_payment(Principal.fromActor(dfx), (5 * 10 ** 8) + 400000, Principal.fromActor(canister)); + + D.print("send to canister"); + D.print(debug_show(a_wallet_send_tokens_to_canister)); + + let block = switch(a_wallet_send_tokens_to_canister){ + case(#ok(ablock)){ + ablock; + }; + case(#err(other)){ + 
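+ // bail out of the royalty test early if the ledger payment to the canister was not confirmed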
D.print("ledger didnt work"); + return #fail("ledger didnt work"); + }; + }; + + D.print("Sending real escrow now"); + let a_wallet_try_escrow_general_staged = await a_wallet.try_escrow_general_staged(Principal.fromActor(canister), Principal.fromActor(canister), Principal.fromActor(dfx), null, 1 * 10 ** 8, null, null); + + D.print("try escrow genreal stage"); + D.print(debug_show(a_wallet_try_escrow_general_staged)); + + let a_balance = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(a_wallet))); + let b_balance = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(b_wallet))); + let n_balance = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(n_wallet))); + let o_balance = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(o_wallet))); + let canister_balance = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(canister))); + let net_balance = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(net_wallet))); + + + D.print("primary sale"); + let primary_sale = await canister.market_transfer_nft_origyn({ + token_id = "1"; + sales_config = + { + escrow_receipt = ?{ + seller = #principal(Principal.fromActor(canister)); + buyer = #principal(Principal.fromActor(a_wallet)); + token_id = ""; + token = #ic({ + canister = Principal.fromActor(dfx); + standard = #Ledger; + decimals = 8; + symbol = "LDG"; + fee = 200000; + }); + amount = 100_000_000; + }; + pricing = #instant; + broker_id = ?Principal.fromActor(b_wallet); + }; + + }); + + D.print(debug_show(primary_sale)); + + //MKT0014 todo: check the transaction record and confirm the gensis reocrd + + //BAL0005 + let a_balance2 = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(a_wallet))); + let b_balance2 = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(b_wallet))); + let n_balance2 = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(n_wallet))); + let o_balance2 = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(o_wallet))); + let canister_balance2 = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(canister))); + let net_balance2 = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(net_wallet))); + + D.print("a wallet " # debug_show((a_balance, a_balance2))); + D.print("b wallet " # debug_show((b_balance, b_balance2))); + D.print("n wallet " # debug_show((n_balance, n_balance2))); + D.print("o wallet " # debug_show((o_balance, o_balance2))); + D.print("canister wallet " # debug_show((canister_balance, canister_balance2))); + D.print("net wallet " # debug_show((net_balance, net_balance2))); + + let test_metadata = await canister.nft_origyn("1"); + + D.print("Sending real escrow now"); + let a_wallet_try_escrow_specific_staged = await a_wallet.try_escrow_specific_staged(Principal.fromActor(this), Principal.fromActor(canister), Principal.fromActor(dfx), null, 1 * 10 ** 8, "2", null, null, null); + + + + //MKT0010 + D.print("secondary sale"); + let specific_market = await canister.market_transfer_nft_origyn({ + token_id = "2"; + sales_config = + { + escrow_receipt = ?{ + seller = #principal(Principal.fromActor(this)); + buyer = #principal(Principal.fromActor(a_wallet)); + token_id = "2"; + token = #ic({ + canister = Principal.fromActor(dfx); + standard = #Ledger; + decimals = 8; + symbol = "LDG"; + fee = 200000; + }); + amount = 100_000_000; + }; + pricing = #instant; + broker_id = ?Principal.fromActor(b_wallet); + }; + + }); + + 
D.print("secondary result" # debug_show(specific_market)); + + let a_balance3 = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(a_wallet))); + let b_balance3 = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(b_wallet))); + let n_balance3 = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(n_wallet))); + let o_balance3 = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(o_wallet))); + let canister_balance3 = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(canister))); + let net_balance3 = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(net_wallet))); + + //withdraw sale + let #ok(b_withdraw) = b_balance2; + D.print(debug_show(b_withdraw)); + let #principal(b_buyer) = b_withdraw.sales[0].buyer; + + D.print(debug_show(Principal.fromActor(b_wallet))); + D.print(debug_show(b_buyer)); + + + let b_withdraw_attempt_sale = await b_wallet.try_sale_withdraw(Principal.fromActor(canister), b_buyer, Principal.fromActor(dfx), Principal.fromActor(b_wallet), "", b_withdraw.sales[0].amount, null); + + D.print("trying withdraw2"); + let #ok(b_withdraw2) = b_balance3; + D.print("withdraw 2 " # debug_show(b_withdraw2)); + let #principal(b_buyer2) = b_withdraw2.sales[1].buyer; + let b_withdraw_attempt_sale2 = await b_wallet.try_sale_withdraw(Principal.fromActor(canister), b_buyer2, Principal.fromActor(dfx), Principal.fromActor(b_wallet), "2", b_withdraw2.sales[1].amount, null); + + let b_balance4 = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(b_wallet))); + + D.print("did I get my tokens " # debug_show(b_withdraw_attempt_sale)); + D.print("did I get my tokens2 " # debug_show(b_withdraw_attempt_sale2)); + + + //start an auction by owner + let start_auction_attempt_owner = await canister.market_transfer_nft_origyn({token_id = "3"; + sales_config = { + escrow_receipt = null; + broker_id = null; + pricing = #auction{ + reserve = ?(1 * 10 ** 8); + token = #ic({ + canister = Principal.fromActor(dfx); + standard = #Ledger; + decimals = 8; + symbol = "LDG"; + fee = 200000; + }); + buy_now = ?(500 * 10 ** 8); + start_price = (1 * 10 ** 8); + start_date = 0; + ending = #date(get_time() + DAY_LENGTH); + min_increase = #amount(10*10**8); + allow_list = null; + }; + }; } ); + + D.print("get sale id"); + let current_sales_id = switch(start_auction_attempt_owner){ + case(#ok(val)){ + switch(val.txn_type){ + case(#sale_opened(sale_data)){ + sale_data.sale_id; + }; + case(_){ + D.print("Didn't find expected sale_opened"); + return #fail("Didn't find expected sale_opened"); + } + }; + + }; + case(#err(item)){ + D.print("error with auction start"); + return #fail("error with auction start"); + }; + }; + + //place escrow + let end_date = get_time() + DAY_LENGTH + DAY_LENGTH; + D.print("sending tokens to canisters"); + + //balance should be 2 ICP + 400000 + + D.print("Sending real escrow now a wallet trye scrow"); + //claiming first escrow + let a_wallet_try_escrow_general_staged2 = await a_wallet.try_escrow_specific_staged(Principal.fromActor(this), Principal.fromActor(canister), Principal.fromActor(dfx), null, 1 * 10 ** 8, "3", ?current_sales_id, null, null); + + //place a valid bid MKT0027 + let a_wallet_try_bid_valid = await a_wallet.try_bid(Principal.fromActor(canister), Principal.fromActor(this), Principal.fromActor(dfx), 1*10**8, "3", current_sales_id, ?Principal.fromActor(b_wallet)); + D.print("a_wallet_try_bid_valid " # debug_show(a_wallet_try_bid_valid)); + + //advance time + let mode 
= canister.__set_time_mode(#test); + let time_result = await canister.__advance_time(end_date + 1); + D.print("new time"); + D.print(debug_show(time_result)); + + //end auction + let end_proper = await canister.sale_nft_origyn(#end_sale("3")); + D.print("end proper"); + D.print(debug_show(end_proper)); + + let a_balance5 = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(a_wallet))); + let b_balance5 = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(b_wallet))); + let n_balance5 = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(n_wallet))); + let o_balance5 = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(o_wallet))); + let canister_balance5 = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(canister))); + let net_balance5 = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(net_wallet))); + + //test balances + + let suite = S.suite("test royalties", [ + + + S.test("fail if node does not get royalty", switch(n_balance2){case(#ok(res)){ + + if(res.sales.size() == 1){ + "found royalty" + } else { + "not found" + }};case(#err(err)){"unexpected error: " # err.flag_point # debug_show(err)};}, M.equals(T.text("found royalty"))), + S.test("fail if broker does not get royalty", switch(b_balance2){case(#ok(res)){ + + if(res.sales.size() == 1){ + "found royalty" + } else { + "not found" + }};case(#err(err)){"unexpected error: " # err.flag_point # debug_show(err)};}, M.equals(T.text("found royalty"))), + S.test("fail if network does not get royalty", switch(net_balance2){case(#ok(res)){ + if(res.sales.size() == 1){ + "found royalty" + } else { + "not found" + }};case(#err(err)){"unexpected error: " # err.flag_point # debug_show(err)};}, M.equals(T.text("found royalty"))), + S.test("fail if node does not get second royalty", switch(n_balance3){case(#ok(res)){ + + if(res.sales.size() == 2){ + "found royalty" + } else { + "not found" + }};case(#err(err)){"unexpected error: " # err.flag_point # debug_show(err)};}, M.equals(T.text("found royalty"))), + S.test("fail if broker does not get second royalty", switch(b_balance3){case(#ok(res)){ + + if(res.sales.size() == 2){ + "found royalty" + } else { + "not found" + }};case(#err(err)){"unexpected error: " # err.flag_point # debug_show(err)};}, M.equals(T.text("found royalty"))), + S.test("fail if network does not get second royalty", switch(net_balance3){case(#ok(res)){ + if(res.sales.size() == 2){ + "found royalty" + } else { + "not found" + }};case(#err(err)){"unexpected error: " # err.flag_point # debug_show(err)};}, M.equals(T.text("found royalty"))), + S.test("fail if originator does not get first royalty", switch(o_balance3){case(#ok(res)){ + if(res.sales.size() == 1){ + "found royalty" + } else { + "not found" + }};case(#err(err)){"unexpected error: " # err.flag_point # debug_show(err)};}, M.equals(T.text("found royalty"))), + S.test("fail if broker still has balance after withdraw", switch(b_balance4){case(#ok(res)){ + if(res.sales.size() == 0){ + "found empty royalty" + } else { + "not found" # debug_show(b_balance4); + }};case(#err(err)){"unexpected error: " # err.flag_point # debug_show(err)};}, M.equals(T.text("found empty royalty"))), + S.test("fail if node does not get third royalty", switch(n_balance5){case(#ok(res)){ + + if(res.sales.size() == 3){ + "found royalty" + } else { + "not found" + }};case(#err(err)){"unexpected error: " # err.flag_point # debug_show(err)};}, M.equals(T.text("found royalty"))), + S.test("fail if broker 
does not get new royalty", switch(b_balance5){case(#ok(res)){ + + if(res.sales.size() == 1){ + "found royalty" + } else { + "not found" # debug_show(b_balance5); + }};case(#err(err)){"unexpected error: " # err.flag_point # debug_show(err)};}, M.equals(T.text("found royalty"))), + S.test("fail if network does not get third royalty", switch(net_balance5){case(#ok(res)){ + if(res.sales.size() == 3){ + "found royalty" + } else { + "not found" + }};case(#err(err)){"unexpected error: " # err.flag_point # debug_show(err)};}, M.equals(T.text("found royalty"))), + S.test("fail if originator does not get second royalty", switch(o_balance5){case(#ok(res)){ + if(res.sales.size() == 2){ + "found royalty" + } else { + "not found" + }};case(#err(err)){"unexpected error: " # err.flag_point # debug_show(err)};}, M.equals(T.text("found royalty"))), + + + ]); + + S.run(suite); + + return #success; + }; + + public shared func testOwnerTransfer() : async {#success; #fail : Text} { + D.print("running testOwnerTransfer"); + + let a_wallet = await TestWalletDef.test_wallet(); + let b_wallet = await TestWalletDef.test_wallet(); + let c_wallet = await TestWalletDef.test_wallet(); + + let newPrincipal = await g_canister_factory.create({ + owner = Principal.fromActor(this); + storage_space = null; + }); + + let canister : Types.Service = actor(Principal.toText(newPrincipal)); + + let standardStage = await utils.buildStandardNFT("1", canister, Principal.fromActor(canister), 1024, false); + + + let mint_attempt = await canister.mint_nft_origyn("1", #principal(Principal.fromActor(a_wallet))); + + //TRX0004 + let trxattempt_fail = await c_wallet.try_owner_transfer(Principal.fromActor(canister), "1", #principal(Principal.fromActor(b_wallet))); + + //TRX0002 + let trxattempt = await a_wallet.try_owner_transfer(Principal.fromActor(canister), "1", #principal(Principal.fromActor(b_wallet))); + + let suite = S.suite("test staged Nft", [ + + S.test("owner can transfer", switch(trxattempt){case(#ok(res)){ + switch(res.transaction.txn_type){ + case(#owner_transfer(details)){ + if(Types.account_eq(details.from, #principal(Principal.fromActor(a_wallet))) == false){ + "from didnt match"; + } else if(Types.account_eq(details.to, #principal(Principal.fromActor(b_wallet))) == false){ + "to didnt match"; + } else { + "correct response"; + }; + }; + case(_){ + "wrong tx type"; + } + }; + };case(#err(err)){"unexpected error: " # err.flag_point};}, M.equals(T.text("correct response"))), //TRX0002 + S.test("fail if transfering for an item you don't own", switch(trxattempt_fail){case(#ok(res)){"unexpected success"};case(#err(err)){ + if(err.number == 11){ // + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), //ESC0011 + + ]); + + S.run(suite); + + return #success; + + + + }; + + public shared func testAuction() : async {#success; #fail : Text} { + D.print("running Auction"); + + let dfx : DFXTypes.Service = actor(Principal.toText(dfx_ledger)); + + let dfx2 : DFXTypes.Service = actor(Principal.toText(dfx_ledger2)); + + + let a_wallet = await TestWalletDef.test_wallet(); + let b_wallet = await TestWalletDef.test_wallet(); + + let funding_result_a = await dfx.transfer({ + to = Blob.fromArray(AccountIdentifier.addHash(AccountIdentifier.fromPrincipal(Principal.fromActor(a_wallet), null))); + fee = {e8s = 200_000}; + memo = 1; + from_subaccount = null; + created_at_time = null; + amount = {e8s = 1000 * 10 ** 8};}); + + + let funding_result_b = await dfx.transfer({ + to = 
Blob.fromArray(AccountIdentifier.addHash(AccountIdentifier.fromPrincipal(Principal.fromActor(b_wallet), null))); + fee = {e8s = 200_000}; + memo = 1; + from_subaccount = null; + created_at_time = null; + amount = {e8s = 1000 * 10 ** 8};}); + + let funding_result_b2 = await dfx2.transfer({ + to = Blob.fromArray(AccountIdentifier.addHash(AccountIdentifier.fromPrincipal(Principal.fromActor(b_wallet), null))); + fee = {e8s = 200_000}; + memo = 1; + from_subaccount = null; + created_at_time = null; + amount = {e8s = 1000 * 10 ** 8};}); + + D.print("funding result b2 " # debug_show(funding_result_b2)); + + let newPrincipal = await g_canister_factory.create({ + owner = Principal.fromActor(this); + storage_space = null; + }); + + let canister : Types.Service = actor(Principal.toText(newPrincipal)); + + let mode = canister.__set_time_mode(#test); + let atime = canister.__advance_time(Time.now()); + + let standardStage = await utils.buildStandardNFT("1", canister, Principal.fromActor(this), 1024, false); //for auctioning a minted item + let standardStage2 = await utils.buildStandardNFT("2", canister, Principal.fromActor(this), 1024, false); //for auctioning an unminted item + + D.print("Minting"); + let mint_attempt = await canister.mint_nft_origyn("1", #principal(Principal.fromActor(this))); //mint to the test account + let mint_attempt2 = await canister.mint_nft_origyn("2", #principal(Principal.fromActor(this))); //mint to the test account + + D.print("start auction fail"); + //non owner start auction should fail MKT0019 + let start_auction_attempt_fail = await a_wallet.try_start_auction(Principal.fromActor(canister), Principal.fromActor(dfx), "1", null); + + D.print("start auction owner"); + //start an auction by owner + let start_auction_attempt_owner = await canister.market_transfer_nft_origyn({token_id = "1"; + sales_config = { + escrow_receipt = null; + broker_id = null; + pricing = #auction{ + reserve = ?(100 * 10 ** 8); + token = #ic({ + canister = Principal.fromActor(dfx); + standard = #Ledger; + decimals = 8; + symbol = "LDG"; + fee = 200000; + }); + buy_now = ?(500 * 10 ** 8);//nyi + start_price = (1 * 10 ** 8); + start_date = 0; + ending = #date(get_time() + DAY_LENGTH); + min_increase = #amount(10*10**8); + allow_list = null; + }; + }; } ); + + D.print("get sale id"); + let current_sales_id = switch(start_auction_attempt_owner){ + case(#ok(val)){ + switch(val.txn_type){ + case(#sale_opened(sale_data)){ + sale_data.sale_id; + }; + case(_){ + D.print("Didn't find expected sale_opened"); + return #fail("Didn't find expected sale_opened"); + } + }; + + }; + case(#err(item)){ + D.print("error with auction start"); + return #fail("error with auction start"); + }; + }; + + D.print("starting again"); + //try starting again//should fail MKT0018 + let start_auction_attempt_owner_already_started = await canister.market_transfer_nft_origyn({token_id = "1"; + sales_config = { + escrow_receipt = null; + broker_id = null; + pricing = #auction{ + reserve = ?(100 * 10 ** 8); + token = #ic({ + canister = Principal.fromActor(dfx); + standard = #Ledger; + decimals = 8; + symbol = "LDG"; + fee = 200000; + }); + buy_now = ?(500 * 10 ** 8); + start_price = (1 * 10 ** 8); + start_date = 0; + ending = #date(get_time() + DAY_LENGTH); + min_increase = #amount(10*10**8); + allow_list = null; + }; + }; } ); + + //MKT0020 - try to transfer with an open auction + let transfer_owner_after_auction = await canister.share_wallet_nft_origyn({token_id = "1"; from = #principal(Principal.fromActor(this)); to = 
#principal(Principal.fromActor(b_wallet))}); + + //place escrow + D.print("sending tokens to canisters"); + let a_wallet_send_tokens_to_canister = await a_wallet.send_ledger_payment(Principal.fromActor(dfx), (4 * 10 ** 8) + 800000, Principal.fromActor(canister)); + + //balance should be 4 ICP + 800000 + + let block = switch(a_wallet_send_tokens_to_canister){ + case(#ok(ablock)){ + ablock; + }; + case(#err(other)){ + D.print("ledger didn't work" # debug_show(other)); + return #fail("ledger didn't work"); + }; + }; + + D.print("trying deposit"); + //try to withdraw back 1 ICP from deposit + let a_wallet_try_deposit_refund = await a_wallet.try_deposit_refund(Principal.fromActor(canister), Principal.fromActor(dfx), 1 * 10 ** 8, null); + + D.print("a_wallet_try_deposit_refund" # debug_show(a_wallet_try_deposit_refund)); + D.print("Sending real escrow now a wallet try escrow"); + + //claiming first escrow + let a_wallet_try_escrow_general_staged = await a_wallet.try_escrow_specific_staged(Principal.fromActor(this), Principal.fromActor(canister), Principal.fromActor(dfx), null, 1 * 10 ** 8, "1", ?current_sales_id, null, null); + + D.print("should be done now"); + let a_balance_before_first = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(a_wallet))); + + D.print("the balance before first is"); + D.print(debug_show(a_balance_before_first)); + + //place a bid below start price + + let a_wallet_try_bid_below_start = await a_wallet.try_bid(Principal.fromActor(canister), Principal.fromActor(this), Principal.fromActor(dfx), 1*10**7 + 200000, "1", current_sales_id, null); + //the above should refund the bid + //todo: bid should be refunded + + let a_balance_after_bad_bid = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(a_wallet))); + D.print("a balance " # debug_show(a_balance_after_bad_bid)); + + + D.print("Sending real escrow now 2"); + let a_wallet_try_escrow_general_staged2b = await a_wallet.try_escrow_specific_staged(Principal.fromActor(this), Principal.fromActor(canister), Principal.fromActor(dfx), null, 2 * 10 ** 8, "1", ?current_sales_id, null, null); + //this should clear out the deposit account + + D.print("Sending real escrow now result 2" # debug_show(a_wallet_try_escrow_general_staged2b)); + + let a_balance_after_bad_bid2 = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(a_wallet))); + D.print("a balance 2 " # debug_show(a_balance_after_bad_bid2)); + + //try a bid in the wrong currency + //place escrow + D.print("sending tokens to canisters b"); + let b_wallet_send_tokens_to_canister = await b_wallet.send_ledger_payment(Principal.fromActor(dfx2), (200 * 10 ** 8) + 200000, Principal.fromActor(canister)); + + let block2b = switch(b_wallet_send_tokens_to_canister){ + case(#ok(ablock)){ + ablock; + }; + case(#err(other)){ + D.print("ledger didn't work"); + return #fail("ledger didn't work"); + }; + }; + + D.print("Sending escrow for wrong currency escrow now b"); + let b_wallet_try_escrow_wrong_currency = await b_wallet.try_escrow_specific_staged(Principal.fromActor(this), Principal.fromActor(canister), Principal.fromActor(dfx2), null, 1 * 10 ** 8, "1", ?current_sales_id, null, null); + + + //place a bid with wrong asset MKT0023 + let b_wallet_try_bid_wrong_asset = await b_wallet.try_bid(Principal.fromActor(canister), Principal.fromActor(this), Principal.fromActor(dfx2), 1*10**8, "1", current_sales_id, null); + + //place a bid on token that isn't for sale MKT0024 + let a_wallet_try_bid_wrong_token_id_not_exist = await 
a_wallet.try_bid(Principal.fromActor(canister), Principal.fromActor(this), Principal.fromActor(dfx), 1*10**8, "2", current_sales_id, null); + + //try starting again//should fail MKT0018 + let end_date = get_time() + DAY_LENGTH; + D.print("end date is "); + D.print(debug_show(end_date)); + //todo: write test + let start_auction_attempt_owner_already_started_b = await canister.market_transfer_nft_origyn({token_id = "2"; + sales_config = { + escrow_receipt = null; + broker_id = null; + pricing = #auction{ + reserve = ?(100 * 10 ** 8); + token = #ic({ + canister = Principal.fromActor(dfx); + standard = #Ledger; + decimals = 8; + symbol = "LDG"; + fee = 200000; + }); + buy_now = ?(500 * 10 ** 8); + start_price = (1 * 10 ** 8); + start_date = 0; + ending = #date(end_date); + min_increase = #amount(10*10**8); + allow_list = null; + }; + }; } ); + + //place a bid on token that isn't for sale MKT0024 + let a_wallet_try_bid_wrong_token_id_exists = await a_wallet.try_bid(Principal.fromActor(canister), Principal.fromActor(this), Principal.fromActor(dfx), 1*10**8, "2", current_sales_id, null); + + //place a bid with bad owner data MKT0025 + let a_wallet_try_bid_wrong_owner = await a_wallet.try_bid(Principal.fromActor(canister), Principal.fromActor(b_wallet), Principal.fromActor(dfx), 1*10**8, "1", current_sales_id, null); + + //place a bid with bad sales id MKT0026 + let a_wallet_try_bid_wrong_sales_id = await a_wallet.try_bid(Principal.fromActor(canister), Principal.fromActor(this), Principal.fromActor(dfx), 1*10**8, "1", "test", null); + + let a_balance_after_bad_bid3 = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(a_wallet))); + D.print("a balance 3 " # debug_show(a_balance_after_bad_bid2)); + + + //place a valid bid MKT0027 + let a_wallet_try_bid_valid = await a_wallet.try_bid(Principal.fromActor(canister), Principal.fromActor(this), Principal.fromActor(dfx), 1*10**8, "1", current_sales_id, null); + D.print("a_wallet_try_bid_valid " # debug_show(a_wallet_try_bid_valid)); + + let a_balance_after_bad_bid4 = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(a_wallet))); + D.print("a balance 4 " # debug_show(a_balance_after_bad_bid4)); + + //check transaction log for bid MKT0033, TRX0005 + let a_history_1 = await canister.history_nft_origyn("1", null, null); //gets all history + + D.print("history1" # debug_show(a_history_1)); + + //make sure next min bid is bid + minimum increase MKT0032 + let a_sale_status_min_bid_increase = await canister.nft_origyn("1"); + + D.print("withdraw during bid"); + //todo: attempt to withdraw escrow for active bid should fail ESC0016 NFT-76 + let a_withdraw_during_bid = await a_wallet.try_escrow_withdraw(Principal.fromActor(canister), Principal.fromActor(a_wallet), Principal.fromActor(dfx), Principal.fromActor(this), "1", 1 * 10 ** 8, null); + + D.print("passed this"); + //place escrow b + let new_bid_val = switch (a_sale_status_min_bid_increase){ + case(#ok(res)){ + + switch(res.current_sale){ + case(?current_sale){ + switch(NFTUtils.get_auction_state_from_statusStable(current_sale)){ + case(#err(err)){return #fail("cannot get min bid to make second bid");}; + case(#ok(res)){ + res.min_next_bid; + }; + }; + }; + case(null){ + return #fail("no sale found for finding min bid for second bid"); + }; + }; + }; + case(#err(err)){ + return #fail("cannot get min bid to make second bid"); + }; + }; + + + //deposit escrow for two upcoming bids + D.print("sending tokens to canisters"); + let b_wallet_send_tokens_to_canister_correct_ledger = 
await b_wallet.send_ledger_payment(Principal.fromActor(dfx), (new_bid_val * 2 ) + 400000, Principal.fromActor(canister)); + + + D.print("Sending escrow for correct currency escrow now"); + let b_wallet_try_escrow_too_low = await b_wallet.try_escrow_specific_staged(Principal.fromActor(this), Principal.fromActor(canister), Principal.fromActor(dfx), null, new_bid_val - 10, "1", ?current_sales_id, null, null); + + + + //place a low bid bid + let b_wallet_try_bid_to_low = await b_wallet.try_bid(Principal.fromActor(canister), Principal.fromActor(this), Principal.fromActor(dfx), new_bid_val - 10, "1", current_sales_id, null); + + + + D.print("Sending escrow for correct currency escrow now"); + let b_wallet_try_escrow_correct_currency2 = await b_wallet.try_escrow_specific_staged(Principal.fromActor(this), Principal.fromActor(canister), Principal.fromActor(dfx), null, new_bid_val, "1", ?current_sales_id, null, null); + + + //place a second bid + let b_wallet_try_bid_valid = await b_wallet.try_bid(Principal.fromActor(canister), Principal.fromActor(this), Principal.fromActor(dfx), new_bid_val, "1", current_sales_id, null); + + D.print("did b bid work? "); + D.print(debug_show(b_wallet_try_bid_valid)); + + let b_balance_after_bid = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(b_wallet))); //gets all history + + D.print("found balance after bid "); + D.print(debug_show(b_balance_after_bid)); + + //check transaction log for bid MKT0033, TRX0005 + let b_history_1 = await canister.history_nft_origyn("1", null, null); //gets all history + + D.print("found balance after bid "); + D.print(debug_show(b_history_1)); + + //place more escrow a + //make sure next min bid is bid + minimum increase MKT0032 + let b_sale_status_min_bid_increase = await canister.nft_origyn("1"); + + let new_bid_val_b = switch (b_sale_status_min_bid_increase){ + case(#ok(res)){ + + switch(res.current_sale){ + case(?current_sale){ + switch(NFTUtils.get_auction_state_from_statusStable(current_sale)){ + case(#err(err)){return #fail("cannot get min bid to make third bid");}; + case(#ok(res)){ + res.min_next_bid; + }; + }; + }; + case(null){ + return #fail("no sale found for finding min bid for third bid"); + }; + }; + }; + case(#err(err)){ + return #fail("cannot get min bid to make third bid"); + }; + }; + + let a_balance_before_third_escrow = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(a_wallet))); + + D.print("the balance before third escrow is"); + D.print(debug_show(a_balance_before_third_escrow)); + + let a_wallet_send_tokens_to_canister2 = await a_wallet.send_ledger_payment(Principal.fromActor(dfx), (101 * 10 ** 8 ) + 200000, Principal.fromActor(canister)); + + let block3 = switch(a_wallet_send_tokens_to_canister2){ + case(#ok(ablock)){ + ablock; + }; + case(#err(other)){ + D.print("ledger didnt work"); + return #fail("ledger didnt work"); + }; + }; + + D.print("Sending real escrow now 3"); //escrow is for 100. There should already be 1 in the escrow account. 
we are going to bid 101 above reserve + let a_wallet_try_escrow_specific_3 = await a_wallet.try_escrow_specific_staged(Principal.fromActor(this), Principal.fromActor(canister), Principal.fromActor(dfx), null, 101 * 10 ** 8, "1", ?current_sales_id, null, null); + + D.print("specific result is"); + D.print(debug_show(a_wallet_try_escrow_specific_3)); + //todo check escrow balance + //check balance and make sure we see the escrow BAL0002 + let a_balance_before_third = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(a_wallet))); + + D.print("the balance before third is"); + D.print(debug_show(a_balance_before_third)); + + //place a third bid + let a_wallet_try_bid_valid_3 = await a_wallet.try_bid(Principal.fromActor(canister), Principal.fromActor(this), Principal.fromActor(dfx), 101 * 10 ** 8, "1", current_sales_id, null); + D.print("valid 3"); + D.print(debug_show(a_wallet_try_bid_valid_3)); + + //try to end auction before it is time should fail + let end_before = await canister.sale_nft_origyn(#end_sale("1")); + D.print("end before"); + D.print(debug_show(end_before)); + D.print("end before"); + + //advance time + let time_result = await canister.__advance_time(end_date + 1); + D.print("new time"); + D.print(debug_show(time_result)); + + //end auction + let end_proper = await canister.sale_nft_origyn(#end_sale("1")); + D.print("end proper"); + D.print(debug_show(end_proper)); + + //end again, should fail + let end_again = await canister.sale_nft_origyn(#end_sale("1")); + D.print("end again"); + D.print(debug_show(end_again)); + + //try to withdraw winning bid NFT-110 + let a_withdraw_during_win = await a_wallet.try_escrow_withdraw(Principal.fromActor(canister), Principal.fromActor(a_wallet), Principal.fromActor(dfx), Principal.fromActor(this), "1", 101 * 10 ** 8, null); + + //NFT-94 check ownership + //check balance and make sure we see the nft + let a_balance_after_close = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(a_wallet))); + + // //MKT0029, MKT0036 + let a_sale_status_over_new_owner = await canister.nft_origyn("1"); + + // //check transaction log + + //check transaction log for sale + let a_history_3 = await canister.history_nft_origyn("1", null, null); //gets all history + + + // //a tries to start a new sale + + // //item is replaced in the current sale + let b_balance_before_withdraw = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(b_wallet))); //gets all history + + D.print("found balance before escrow withdraw"); + D.print(debug_show(b_balance_before_withdraw)); + //b tries to withdraw more than in account NFT-99 + + let b_withdraw_over = await b_wallet.try_escrow_withdraw(Principal.fromActor(canister), Principal.fromActor(b_wallet), Principal.fromActor(dfx), Principal.fromActor(this), "1", 101 * 10 ** 8, null); + + // //b tries to withdraw for other buyer NFT-102 + + let b_withdraw_bad_buyer = await b_wallet.try_escrow_withdraw(Principal.fromActor(canister), Principal.fromActor(a_wallet), Principal.fromActor(dfx), Principal.fromActor(this), "1", new_bid_val, null); + + // //b tries to withdraw for other seller NFT-104 + + let b_withdraw_bad_seller = await b_wallet.try_escrow_withdraw(Principal.fromActor(canister), Principal.fromActor(b_wallet), Principal.fromActor(dfx), Principal.fromActor(a_wallet), "1", new_bid_val, null); + + // //b tries to withdraw for other tokenid NFT-105 + + let b_withdraw_bad_token_id = await b_wallet.try_escrow_withdraw(Principal.fromActor(canister), Principal.fromActor(b_wallet), 
Principal.fromActor(dfx), Principal.fromActor(a_wallet), "32", new_bid_val, null); + + // //b tries to withdraw for other token NFT-103 + + let b_withdraw_bad_token = await b_wallet.try_escrow_withdraw(Principal.fromActor(canister), Principal.fromActor(b_wallet), Principal.fromActor(dfx), Principal.fromActor(a_wallet), "1", new_bid_val, ?#ic{ + canister=Principal.fromActor(dfx2); + standard= #Ledger; + decimals = 8; + symbol = "LGY"; + fee = 200000;}); + + // //b escrow should be auto refunded - need to test + + let b_withdraw = await b_wallet.try_escrow_withdraw(Principal.fromActor(canister), Principal.fromActor(b_wallet), Principal.fromActor(dfx), Principal.fromActor(this), "1", new_bid_val, null); + + D.print("this withdraw should not work"); + D.print(debug_show(b_withdraw)); + //b withdraws escrow again NFT-106 + + let b_withdraw_again = await b_wallet.try_escrow_withdraw(Principal.fromActor(canister), Principal.fromActor(b_wallet), Principal.fromActor(dfx), Principal.fromActor(this), "1", new_bid_val, null); + D.print("this withdraw should not work again"); + D.print(debug_show(b_withdraw_again)); + + //check transaction log for sale + let b_history_withdraw = await canister.history_nft_origyn("1", null, null); //gets all history + + + + //attempt to withdraw the sale revenue for the seller(this canister) + + //NFT-113 + //get balanance and make sure the sale is in the balance + let owner_balance_after_sale = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(this))); + + D.print("withdraw over for owner"); + //NFT-114 + //try to withdraw too much + let owner_withdraw_over = await canister.sale_nft_origyn(#withdraw(#sale({ + withdraw_to = #principal(Principal.fromActor(this)); + token_id= "1"; + token = + #ic({ + canister = Principal.fromActor(dfx); + standard = #Ledger; + decimals = 8; + symbol = "LDG"; + fee = 200000; + }); + + seller = #principal(Principal.fromActor(this)); + buyer = #principal(Principal.fromActor(a_wallet)); + amount = (101*10**8) + 15;}))); + + + + // //NFT-115 + // //have a_wallet try to withdraw the sale + let a_withdraw_attempt_sale = await a_wallet.try_sale_withdraw(Principal.fromActor(canister), Principal.fromActor(a_wallet), Principal.fromActor(dfx), Principal.fromActor(this), "1", new_bid_val, null); + + + //NFT-116 + //try to withdare the wrong asset + D.print("owner_withdraw_wrong_asset"); + let owner_withdraw_wrong_asset = await canister.sale_nft_origyn(#withdraw(#sale({ + withdraw_to = #principal(Principal.fromActor(this)); + token_id= "1"; + token = + #ic({ + canister = Principal.fromActor(dfx2); + standard = #Ledger; + decimals = 8; + symbol = "LGY"; + fee = 200000; + }); + + seller = #principal(Principal.fromActor(this)); + buyer = #principal(Principal.fromActor(a_wallet)); + amount = 101*10**8;}))); + + //NFT-117 + //todo: try to withdraw the wrong token_id + D.print("owner_withdraw_wrong_token_id"); + let owner_withdraw_wrong_token_id = await canister.sale_nft_origyn(#withdraw(#sale({ + withdraw_to = #principal(Principal.fromActor(this)); + token_id= "2"; + token = + #ic({ + canister = Principal.fromActor(dfx); + standard = #Ledger; + decimals = 8; + symbol = "LDG"; + fee = 200000; + }); + + seller = #principal(Principal.fromActor(this)); + buyer = #principal(Principal.fromActor(a_wallet)); + amount = 101*10**8;}))); + + //NFT-19 + //todo: withdraw the proper amount + D.print("withdrawing proper amount from sale"); + let owner_withdraw_proper = await canister.sale_nft_origyn(#withdraw(#sale({ + withdraw_to = 
#principal(Principal.fromActor(this)); + token_id= "1"; + token = + #ic({ + canister = Principal.fromActor(dfx); + standard = #Ledger; + decimals = 8; + symbol = "LGY"; + fee = 200000; + }); + + seller = #principal(Principal.fromActor(this)); + buyer = #principal(Principal.fromActor(a_wallet)); + amount = (101*10**8) - 200000;}))); + + D.print("Proper amount result"); + D.print(debug_show(owner_withdraw_proper)); + //NFT-118 + //todo: try to withdraw again + D.print("trying to withdraw sale again"); + let owner_withdraw_again = await canister.sale_nft_origyn(#withdraw(#sale({ + withdraw_to = #principal(Principal.fromActor(this)); + token_id= "1"; + token = + #ic({ + canister = Principal.fromActor(dfx); + standard = #Ledger; + decimals = 8; + symbol = "LDG"; + fee = 200000; + }); + + seller = #principal(Principal.fromActor(this)); + buyer = #principal(Principal.fromActor(a_wallet)); + amount = 101*10**8;}))); + + // //NFT-118 + // //todo: check balance and make sure it is gone + let owner_balance_after_withdraw = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(this))); + + + //NFT-19 + //todo: check ledger and make sure transaction is there and it went to the right account + //check transaction log for sale + D.print("tring owner hisotry"); + let owner_history_withdraw = await canister.history_nft_origyn("1", null, null); //gets all history + + + + let suite = S.suite("test staged Nft", [ + + S.test("test mint attempt", switch(mint_attempt){case(#ok(res)){ + + "correct response"; + + };case(#err(err)){"unexpected error: " # err.flag_point};}, M.equals(T.text("correct response"))), + S.test("fail if non owner tries to start auction", switch(start_auction_attempt_fail){case(#ok(res)){"unexpected success"};case(#err(err)){ + if(err.number == 2000){ //unauthorized + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), //MKT0019 + S.test("fail if auction already running", switch(start_auction_attempt_owner_already_started){case(#ok(res)){"unexpected success"};case(#err(err)){ + if(err.number == 13){ //existing sale + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), //MKT0018 + S.test("auction is started", switch(start_auction_attempt_owner){case(#ok(res)){ + switch(res.txn_type){ + case(#sale_opened(details)){ + "correct response"; + }; + case(_){ + "bad transaction type"; + }; + }; + };case(#err(err)){"unexpected error: " # err.flag_point};}, M.equals(T.text("correct response"))), //MKT0021 + + S.test("fail if refund isn't succesful", switch(a_wallet_try_deposit_refund){case(#ok(res)){ + "expected success" + }; + case(#err(err)){ + + "wrong error " # debug_show(err); + };}, M.equals(T.text("expected success"))), + S.test("transfer ownerfail if auction already running", switch(transfer_owner_after_auction){case(#ok(res)){"unexpected success"};case(#err(err)){ + if(err.number == 13){ //existing sale + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), //MKT0022 + S.test("fail if bid too low", switch(a_wallet_try_bid_below_start){case(#ok(res)){"unexpected success"};case(#err(err)){ + if(err.number == 4004){ //below bid price + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), //MKT0023 + S.test("fail if wrong asset", switch(b_wallet_try_bid_wrong_asset){case(#ok(res)){"unexpected success"};case(#err(err)){ + if(err.number == 4002){ //wrong asset + 
"correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), //MKT0024 + S.test("fail if cant find sale id ", switch(a_wallet_try_bid_wrong_token_id_not_exist){case(#ok(res)){"unexpected success"};case(#err(err)){ + if(err.number == 4003){ //MKT0024 + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), //MKT0023 + S.test("fail if bid on wrong token ", switch(a_wallet_try_bid_wrong_token_id_exists){case(#ok(res)){"unexpected success"};case(#err(err)){ + if(err.number == 4003){ //wrong token + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), //MKT0026 + + S.test("fail if bid on wrong owner ", switch(a_wallet_try_bid_wrong_owner){case(#ok(res)){"unexpected success"};case(#err(err)){ + if(err.number == 4001){ //wrong token + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), //MKT0025 + S.test("bid is succesful", switch(a_wallet_try_bid_valid){case(#ok(res)){ + D.print("as bid"); + D.print(debug_show(a_wallet_try_bid_valid)); + switch(res.txn_type){ + case(#auction_bid(details)){ + if(Types.account_eq(details.buyer, #principal(Principal.fromActor(a_wallet))) and + details.amount == 1*10**8 and + details.sale_id == current_sales_id and + Types.token_eq(details.token, #ic({ + canister = (Principal.fromActor(dfx)); + standard = #Ledger; + decimals = 8; + symbol = "LDG"; + fee = 200000;}))){ + "correct response"; + } else { + "details didnt match" # debug_show(details); + }; + }; + case(_){ + "bad transaction bid"; + }; + }; + };case(#err(err)){"unexpected error: " # err.flag_point};}, M.equals(T.text("correct response"))), //MKT0027 + S.test("transaction history has the bid", switch(a_history_1){case(#ok(res)){ + + D.print("where ismy history"); + D.print(debug_show(a_history_1)); + if(res.size() > 0){ + switch(res[res.size()-1].txn_type){ + case(#auction_bid(details)){ + if(Types.account_eq(details.buyer, #principal(Principal.fromActor(a_wallet))) and + details.amount == 1*10**8 and + details.sale_id == current_sales_id and + Types.token_eq(details.token, #ic({ + canister = (Principal.fromActor(dfx)); + standard = #Ledger; + decimals = 8; + symbol = "LDG"; + fee = 200000; + }))){ + "correct response"; + } else { + "details didnt match" # debug_show(details); + }; + }; + case(_){ + "bad history bid"; + }; + } + } else { + "size was 0"; + } + };case(#err(err)){"unexpected error: " # err.flag_point};}, M.equals(T.text("correct response"))), //TRX0005, MKT0033 + S.test("min bid increased", switch(a_sale_status_min_bid_increase){case(#ok(res)){ + + switch(res.current_sale){ + case(?current_sale){ + switch(NFTUtils.get_auction_state_from_statusStable(current_sale)){ + case(#err(err)){"unexpected error: " # err.flag_point}; + case(#ok(res)){ + //let min_bid_increase = + if(res.min_next_bid == (1*10**8) + 10*10**8 ){ + "correct response" + } else { + "wrong bid " # debug_show(res.min_next_bid); + } + }; + }; + + }; + case(_){ + "bad info min bid"; + }; + } + };case(#err(err)){"unexpected error: " # err.flag_point};}, M.equals(T.text("correct response"))), //MKT0032 + S.test("fail if bid is too low ", switch(b_wallet_try_bid_to_low){case(#ok(res)){"unexpected success"};case(#err(err)){ + if(err.number == 4004){ //too low + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), //todo: create user story for bid too low + 
S.test("transaction history has the new bid", switch(b_history_1){case(#ok(res)){ + + D.print("new bid history"); + D.print(debug_show(b_history_1)); + if(res.size() > 0){ + switch(res[res.size()-1].txn_type){ + case(#auction_bid(details)){ + if(Types.account_eq(details.buyer, #principal(Principal.fromActor(b_wallet))) and + details.amount == new_bid_val and + details.sale_id == current_sales_id and + Types.token_eq(details.token, #ic({ + canister = (Principal.fromActor(dfx)); + standard = #Ledger; + decimals = 8; + symbol = "LDG"; + fee = 200000;}))){ + "correct response"; + } else { + "details didnt match for second bid " # debug_show(details); + }; + }; + case(_){ + "bad history bid for b " # debug_show(res); + }; + } + } else { + "size was zero for new bid"; + } + };case(#err(err)){"unexpected error: " # err.flag_point};}, M.equals(T.text("correct response"))), //TRX0005, MKT0033 + S.test("escrow balance is right amount for a before thrid bid", switch(a_balance_before_third){case(#ok(res)){ + D.print("testing third bid"); + D.print(debug_show(res)); + D.print(debug_show(#principal(Principal.fromActor(canister)))); + D.print(debug_show(#principal(Principal.fromActor(a_wallet)))); + D.print(debug_show(Principal.fromActor(dfx))); + D.print(debug_show(Types.account_eq(res.escrow[0].seller, #principal(Principal.fromActor(canister))))); + D.print(debug_show(Types.account_eq(res.escrow[0].buyer, #principal(Principal.fromActor(a_wallet))))); + D.print(debug_show(res.escrow[0].token_id == "1")); + D.print(debug_show(Types.token_eq(res.escrow[0].token, #ic({canister = Principal.fromActor(dfx);standard = #Ledger; decimals = 8;symbol = "LDG";fee = 200000;})))); + D.print(debug_show(res.escrow[0].amount == 104 * 10 **8, res.escrow[0].amount , 104 * 10 **8,)); + if(Types.account_eq(res.escrow[0].seller, #principal(Principal.fromActor(this))) and + Types.account_eq(res.escrow[0].buyer, #principal(Principal.fromActor(a_wallet))) and + res.escrow[0].token_id == "1" and + Types.token_eq(res.escrow[0].token, #ic({ + canister = Principal.fromActor(dfx); + standard = #Ledger; + decimals = 8; + symbol = "LDG"; + fee = 200000; + })) and + res.escrow[0].amount == 103 * 10 **8 + ){ + "found escrow record" + } else { + D.print(debug_show(res)); + "didnt find record " + }};case(#err(err)){"unexpected error: " # err.flag_point};}, M.equals(T.text("found escrow record"))), //todo: MKT0037 + S.test("auction winner is the new owner", switch(a_sale_status_over_new_owner){case(#ok(res)){ + + let new_owner = switch(Metadata.get_nft_owner( + switch (a_sale_status_over_new_owner){ + case(#ok(item)){ + item.metadata; + }; + case(#err(err)){ + #Empty; + }; + })){ + case(#err(err)){ + #account_id("wrong"); + }; + case(#ok(val)){ + val; + }; + }; + D.print("new owner"); + D.print(debug_show(new_owner)); + D.print(debug_show(Principal.fromActor(a_wallet))); + if(Types.account_eq(new_owner, #principal(Principal.fromActor(a_wallet)))){ + "found correct owner" + } else { + D.print(debug_show(res)); + "didnt find record " + }};case(#err(err)){"unexpected error: " # err.flag_point};}, M.equals(T.text("found correct owner"))), //MKT0029 + S.test("current sale status is ended", switch(a_sale_status_over_new_owner){case(#ok(res)){ + D.print("a_sale_status_over_new_owner"); + D.print(debug_show(a_sale_status_over_new_owner)); + //MKT0036 sale should be over and there should be a record with status #ended + switch (a_sale_status_over_new_owner){ + case(#ok(res)){ + + switch(res.current_sale){ + case(null){ + "current sale improperly 
removed" + }; + case(?val){ + switch(val.sale_type){ + case(#auction(state)){ + D.print("state"); + D.print(debug_show(state)); + let current_status = switch(state.status){case(#closed){true;};case(_){false}}; + if(current_status == true and + val.sale_id == current_sales_id){ + "found closed sale"; + } else { + "didnt find closed sale"; + }; + + }; + + }; + }; + }; + + }; + case(#err(err)){ + "error getting"; + }; + }; + };case(#err(err)){"unexpected error: " # err.flag_point};}, M.equals(T.text("found closed sale"))), // MKT0036 + + + + S.test("transaction history have the transfer - auction", + switch(a_history_3){ + case(#ok(res)){ + + if(res.size() > 0){ + switch(res[res.size()-1].txn_type){ + case(#sale_ended(details)){ + if(Types.account_eq(details.buyer, #principal(Principal.fromActor(a_wallet))) and + details.amount == 101*10**8 and + details.sale_id == ?current_sales_id and + Types.token_eq(details.token, #ic({ + canister = (Principal.fromActor(dfx)); + standard = #Ledger; + decimals = 8; + symbol = "LDG"; + fee = 200000;}))){ + "correct response"; + } else { + "details didnt match" # debug_show(details); + }; + }; + case(_){ + "bad history sale"; + }; + }; + } else { + "size was 0" + }; + + }; + case(#err(err)){"unexpected error: " # err.flag_point}; + }, M.equals(T.text("correct response"))), //todo: make a user story for adding a #sale_ended to the end of transaction log + S.test("fail if ended before corect date ", switch(end_before){case(#ok(res)){"unexpected success"};case(#err(err)){ + if(err.number == 4007){ //sale not over + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), //todo: create user story for sale not over + S.test("transaction history have the transfer - auction 2", switch(end_proper){case(#ok(#end_sale(res))){ + switch(res.txn_type){ + case(#sale_ended(details)){ + if(Types.account_eq(details.buyer, #principal(Principal.fromActor(a_wallet))) and + details.amount == 101*10**8 and + Option.get(details.sale_id, "") == current_sales_id and + Types.token_eq(details.token, #ic({ + canister = (Principal.fromActor(dfx)); + standard = #Ledger; + decimals = 8; + symbol = "LDG"; + fee = 200000;}))){ + "correct response"; + } else { + "details didnt match" # debug_show(details); + }; + }; + case(_){ + "bad history sale"; + }; + } + };case(#err(err)){"unexpected error: " # err.flag_point}; + case(_){"unexpected error: " };}, M.equals(T.text("correct response"))), //todo: make a user story for adding a #sale_ended to the end of transaction log + S.test("fail if auction already over ", switch(end_again){case(#ok(res)){"unexpected success"};case(#err(err)){ + if(err.number == 2000){ //new owner so unauthorized + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), //todo: create user story for sale over + S.test("fail if escrow amount over deposited amount", switch(b_withdraw_over){case(#ok(res)){"unexpected success"};case(#err(err)){ + if(err.number == 3000){ //escrow no longer found since it was refunded + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), // NFT-101 + S.test("fail if escrow amount is the wrong token", switch(b_withdraw_bad_token){case(#ok(res)){"unexpected success"};case(#err(err)){ + if(err.number == 3000){ //shouldn't be able to find escrow + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), //t NFT-103 + S.test("fail if escrow 
amount is the wrong seller", switch(b_withdraw_bad_seller){case(#ok(res)){"unexpected success"};case(#err(err)){ + if(err.number == 3000){ //shouldn't be able to find escrow + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), //t NFT-104 + S.test("fail if escrow amount is the wrong buyer", switch(b_withdraw_bad_buyer){case(#ok(res)){"unexpected success"};case(#err(err)){ + if(err.number == 2000){ //unauthorized + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), // NFT-102 + S.test("fail if escrow amount is the wrong token_id", switch(b_withdraw_bad_token_id){case(#ok(res)){"unexpected success"};case(#err(err)){ + if(err.number == 3000){ //token id not found + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), // NFT-105 + S.test("fail if escrow removed twice", switch(b_withdraw_bad_token_id){case(#ok(res)){"unexpected success"};case(#err(err)){ + if(err.number == 3000){ //withdraw too large because 0 and not found + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), // NFT-106 + S.test("fail if escrow is for the current winning bid", switch(a_withdraw_during_bid){case(#ok(res)){"unexpected success"};case(#err(err)){ + if(err.number == 3008){ //cannot be removed + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), // NFT-76 + + S.test("fail if escrow is for the winning bid a withdraw", switch(a_withdraw_during_win){case(#ok(res)){"unexpected success"};case(#err(err)){ + if(err.number == 3000 or err.number == 3007){ //wont be able to find it because it has been zeroed out. + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), // NFT-110 + S.test("fail if escrow is for the winning bid b withdraw", switch(b_withdraw){case(#ok(res)){"unexpected success"};case(#err(err)){ + if(err.number == 3000){ //wont be able to find it because it has been zeroed out. 
+ "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), // NFT-18, NFT-101 - These were negated by NFT-120 + //todo: test needs to be re written to cycle through history and find the escrow + /* S.test("escrow withdraw in transaction record", switch(b_history_withdraw){case(#ok(res)){ + D.print("b_history_withdraw"); + D.print(debug_show(b_history_withdraw)); + switch(res[res.size()-1].txn_type){ + case(#escrow_withdraw(details)){ + if(Types.account_eq(details.buyer, #principal(Principal.fromActor(b_wallet))) and + Types.account_eq(details.seller, #principal(Principal.fromActor(this))) and + details.amount == ((11*10**8) - dip20_fee) and + details.token_id == "1" and + Types.token_eq(details.token, #ic({ + canister = (Principal.fromActor(dfx)); + standard = #Ledger; + decimals = 8; + symbol = "LDG"; + fee = 200000;}))){ + "correct response"; + } else { + "details didnt match" # debug_show(details); + }; + }; + case(_){ + D.print("Bad history sale"); + D.print(debug_show(res)); + "bad history sale"; + }; + } + };case(#err(err)){"unexpected error: " # err.flag_point};}, M.equals(T.text("correct response"))), //NFT-107 + */ + S.test("sales balance after sale has balance in it", switch(owner_balance_after_sale){case(#ok(res)){ + D.print("testing sale balance 1"); + D.print(debug_show(res)); + + + if(res.sales.size() > 0){ + D.print(debug_show(res.sales[0].amount == 101 * 10 **8)); + if(Types.account_eq(res.sales[0].seller, #principal(Principal.fromActor(this))) and + Types.account_eq(res.sales[0].buyer, #principal(Principal.fromActor(a_wallet))) and + res.sales[0].token_id == "1" and + Types.token_eq(res.sales[0].token, #ic({ + canister = Principal.fromActor(dfx); + standard = #Ledger; + decimals = 8; + symbol = "LDG"; + fee = 200000; + })) and + res.sales[0].amount == (101 * 10 **8) - 200000 + ){ + "found sales record" + } else { + + D.print(debug_show(res)); + "didnt find record " + }} else { + "sales size 0" + }; + };case(#err(err)){"unexpected error: " # err.flag_point};}, M.equals(T.text("found sales record"))), //todo: NFT-113 + S.test("fail if withdraw over sale amount", switch(owner_withdraw_over){case(#ok(res)){"unexpected success"};case(#err(err)){ + if(err.number == 3007){ //withdraw too large + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), // NFT-114 + S.test("fail if withdraw from wrong account", switch(a_withdraw_attempt_sale){case(#ok(res)){"unexpected success"};case(#err(err)){ + if(err.number == 2000){ //unauthorized access + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), // NFT-115 + S.test("fail if withdraw from wrong asset", switch(owner_withdraw_wrong_asset){case(#ok(res)){"unexpected success"};case(#err(err)){ + if(err.number == 3000){ //cant find sale + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), // NFT-116 + S.test("fail if withdraw from wrong token id", switch(owner_withdraw_wrong_token_id){case(#ok(res)){"unexpected success"};case(#err(err)){ + if(err.number == 3000){ //cant find sale + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), // NFT-117 + S.test("fail if withdraw a second time", switch(owner_withdraw_again){case(#ok(res)){"unexpected success"};case(#err(err)){ + if(err.number == 3000){ //cant find sale + "correct number" + } else{ + "wrong error " # 
debug_show(err); + }};}, M.equals(T.text("correct number"))), // NFT-117 + S.test("sale withdraw works", switch(owner_withdraw_proper){case(#ok(#withdraw(res))){ + switch(res.txn_type){ + case(#sale_withdraw(details)){ + if(Types.account_eq(details.buyer, #principal(Principal.fromActor(a_wallet))) and + Types.account_eq(details.seller, #principal(Principal.fromActor(this))) and + details.amount == (Nat.sub((101*10**8) - 200000, dip20_fee)) and + details.token_id == "1" and + Types.token_eq(details.token, #ic({ + canister = (Principal.fromActor(dfx)); + standard = #Ledger; + decimals = 8; + symbol = "LDG"; + fee = 200000;}))){ + "correct response"; + } else { + "details didnt match" # debug_show(details); + }; + }; + case(_){ + "bad history sale"; + }; + } + };case(#err(err)){"unexpected error: " # err.flag_point}; + case(_){"unexpected error: " }}, M.equals(T.text("correct response"))), //NFT-18, NFT-101 + S.test("sales balance after withdraw has no balance in it", switch(owner_balance_after_withdraw){case(#ok(res)){ + D.print("testing sale balance 2"); + D.print(debug_show(res)); + + + if(res.sales.size() == 0){ + "found empty record" + } else { + D.print(debug_show(res)); + "found a record " + }};case(#err(err)){"unexpected error: " # err.flag_point};}, M.equals(T.text("found empty record"))), //todo: NFT-118 + S.test("sale withdraw in history", switch(owner_history_withdraw){case(#ok(res)){ + D.print("sales withdraw history"); + D.print(debug_show(res)); + switch(res[res.size()-1].txn_type){ + case(#sale_withdraw(details)){ + D.print(debug_show(details)); + if(Types.account_eq(details.buyer, #principal(Principal.fromActor(a_wallet))) and + Types.account_eq(details.seller, #principal(Principal.fromActor(this))) and + details.amount == (Nat.sub(((101*10**8)-200000),dip20_fee)) and + details.token_id == "1" and + Types.token_eq(details.token, #ic({ + canister = (Principal.fromActor(dfx)); + standard = #Ledger; + decimals = 8; + symbol = "LDG"; + fee = 200000;}))){ + "correct response"; + } else { + "details didnt match" # debug_show(details); + }; + }; + case(_){ + D.print(debug_show(res[res.size()-1])); + "bad history withdraw"; + }; + } + };case(#err(err)){"unexpected error: " # err.flag_point};}, M.equals(T.text("correct response"))), //NFT-19 + S.test("nft balance after sale", switch(a_balance_after_close){case(#ok(res)){ + D.print("testing nft balance"); + D.print(debug_show(res)); + + + if(res.nfts.size() == 0){ + "found empty record" + } else { + D.print(debug_show(res)); + if(res.nfts[res.nfts.size()-1] == "1"){ + "found a record" + }else { + "didnt find record" + }; + + }; + };case(#err(err)){"unexpected error: " # err.flag_point};}, M.equals(T.text("found a record"))), //todo: NFT-94 + + + + + ]); + + D.print("suite running"); + + S.run(suite); + + D.print("suite over"); + + return #success; + + + + }; + + public shared func testStandardLedger() : async {#success; #fail : Text} { + D.print("running testStandardLedger"); + + let a_wallet = await TestWalletDef.test_wallet(); + + let newPrincipal = await g_canister_factory.create({ + owner = Principal.fromActor(this); + storage_space = null; + }); + + let canister : Types.Service = actor(Principal.toText(newPrincipal)); + + D.print("calling stage"); + + let standardStage = await utils.buildStandardNFT("1", canister, Principal.fromActor(canister), 1024, false); + let standardStage2 = await utils.buildStandardNFT("2", canister, Principal.fromActor(canister), 1024, false); + let standardStage3 = await utils.buildStandardNFT("3", 
canister, Principal.fromActor(canister), 1024, false); + + D.print("finished stage"); + D.print(debug_show(standardStage.0)); + + //ESC0002. try to escrow for the canister; should succeed + //fund a_wallet + D.print("funding result start a_wallet"); + D.print(AccountIdentifier.toText(AccountIdentifier.addHash(AccountIdentifier.fromPrincipal(Principal.fromActor(a_wallet), null)))); + D.print(AccountIdentifier.toText(AccountIdentifier.fromPrincipal(Principal.fromActor(a_wallet), null))); + D.print(debug_show(AccountIdentifier.addHash(AccountIdentifier.fromPrincipal(Principal.fromActor(a_wallet), null)))); + let dfx : DFXTypes.Service = actor(Principal.toText(dfx_ledger)); + + let funding_result = await dfx.transfer({ + to = Blob.fromArray(AccountIdentifier.addHash(AccountIdentifier.fromPrincipal(Principal.fromActor(a_wallet), null))); + fee = {e8s = 200_000}; + memo = 1; + from_subaccount = null; + created_at_time = null; + amount = {e8s = 100 * 10 ** 8};}); + D.print("funding result end"); + D.print(debug_show(funding_result)); + + //sent an escrow for a stdledger deposit that doesn't exist + D.print("sending an escrow with no deposit"); + let a_wallet_try_escrow_general_fake = await a_wallet.try_escrow_general_staged(Principal.fromActor(canister), Principal.fromActor(canister), Principal.fromActor(dfx), ?34, 1 * 10 ** 8, ?#ic({ + canister= Principal.fromActor(dfx); + standard=#Ledger; + decimals = 8; + symbol = "LDG"; + fee = 200000;}), null); + + + //send a payment to the ledger + D.print("sending tokens to canisters"); + let a_wallet_send_tokens_to_canister = await a_wallet.send_ledger_payment(Principal.fromActor(dfx), (1 * 10 ** 8) + 200000, Principal.fromActor(canister)); + + D.print("send to canister a"); + D.print(debug_show(a_wallet_send_tokens_to_canister)); + + debug{ if(debug_channel.throws) D.print("checking block_result")}; + let #ok(block_result) = a_wallet_send_tokens_to_canister; + let block = Nat64.toNat(block_result);//block is no longer relevant for ledgers + + //sent an escrow for a ledger deposit that doesn't exist + let a_wallet_try_escrow_general_fake_amount = await a_wallet.try_escrow_general_staged(Principal.fromActor(canister), Principal.fromActor(canister), Principal.fromActor(dfx), null, 2 * 10 ** 8, ?#ic({ + canister= Principal.fromActor(dfx); + standard=#Ledger; + decimals = 8; + symbol = "LDG"; + fee = 200000;}), null); + + D.print("a_wallet_try_escrow_general_fake_amount" # debug_show(a_wallet_try_escrow_general_fake_amount)); + + ////ESC0001 + + D.print("Sending real escrow now"); + let a_wallet_try_escrow_general_staged = await a_wallet.try_escrow_general_staged(Principal.fromActor(canister), Principal.fromActor(canister), Principal.fromActor(dfx), ?block, 1 * 10 ** 8, ?#ic({ + canister= Principal.fromActor(dfx); + standard=#Ledger; + decimals = 8; + symbol = "LDG"; + fee = 200000;}), null); + + D.print("try escrow genreal stage"); + D.print(debug_show(a_wallet_try_escrow_general_staged)); + + //ESC0005 should fail if you try to calim a deposit a second time + let a_wallet_try_escrow_general_staged_retry = await a_wallet.try_escrow_general_staged(Principal.fromActor(canister), Principal.fromActor(canister), Principal.fromActor(dfx), ?block, 1 * 10 ** 8, ?#ic({ + canister= Principal.fromActor(dfx); + standard=#Ledger; + decimals = 8; + symbol = "LDG"; + fee = 200000;}), null); + + //check balance and make sure we see the escrow BAL0002 + let a_balance = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(a_wallet))); + + 
D.print("thebalance"); + D.print(debug_show(a_balance)); + + //MKT0007, MKT0014 + D.print("blind market"); + let blind_market = await canister.market_transfer_nft_origyn({ + token_id = "1"; + sales_config = + { + escrow_receipt = ?{ + seller = #principal(Principal.fromActor(canister)); + buyer = #principal(Principal.fromActor(a_wallet)); + token_id = ""; + token = #ic({ + canister= Principal.fromActor(dfx); + standard=#Ledger; + decimals = 8; + symbol = "LDG"; + fee = 200000;}); + amount = 100_000_000; + }; + pricing = #instant; + broker_id = null; + }; + + }); + + D.print(debug_show(blind_market)); + + //MKT0014 todo: check the transaction record and confirm the gensis reocrd + + //BAL0005 + let a_balance2 = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(a_wallet))); + + //BAL0003 + let canister_balance = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(canister))); + + //MKT0013, MKT0011 this item should be minted now + let test_metadata = await canister.nft_origyn("1"); + D.print("This thing should have been minted"); + D.print(debug_show(test_metadata)); + switch(test_metadata){ + case(#ok(val)){ + D.print(debug_show(Metadata.is_minted(val.metadata))); + }; + case(_){}; + }; + + //MINT0026 shold fail because the purchase of a staged item should mint it + let mint_attempt = await canister.mint_nft_origyn("1", #principal(Principal.fromActor(a_wallet))); + D.print("This thing should have not been minted"); + D.print(debug_show(mint_attempt)); + + //ESC0009 + let blind_market2 = await canister.market_transfer_nft_origyn({ + token_id = "2"; + sales_config = + { + escrow_receipt = ?{ + seller = #principal(Principal.fromActor(canister)); + buyer = #principal(Principal.fromActor(a_wallet)); + token_id = ""; + token = #ic({ + canister= Principal.fromActor(dfx); + standard=#Ledger; + decimals = 8; + symbol = "LDG"; + fee = 200000;}); + amount = 100_000_000; + }; + pricing = #instant; + broker_id = null; + }; + + }); + + D.print("This thing should have not been minted either"); + D.print(debug_show(blind_market2)); + + //mint the third item to test a specific sale + let mint_attempt3 = await canister.mint_nft_origyn("3", #principal(Principal.fromActor(this))); + + D.print("mint attempt 3"); + D.print(debug_show(mint_attempt3)); + + //creae an new wallet for testing + let b_wallet = await TestWalletDef.test_wallet(); + + //give b_wallet some tokens + D.print("funding result start b_wallet"); + let b_funding_result = await dfx.transfer({ + to = Blob.fromArray(AccountIdentifier.addHash(AccountIdentifier.fromPrincipal(Principal.fromActor(b_wallet), null))); + fee = {e8s = 200_000}; + memo = 1; + from_subaccount = null; + created_at_time = null; + amount = {e8s = 100 * 10 ** 8};}); + D.print("funding result"); + D.print(debug_show(funding_result)); + + //send a payment to the the new owner(this actor- after mint) + D.print("sending tokens to canisters"); + + let b_wallet_send_tokens_to_canister = await b_wallet.send_ledger_payment(Principal.fromActor(dfx), (1 * 10 ** 8) + 200000, Principal.fromActor(canister)); + + D.print("send to canister b"); + D.print(debug_show(b_wallet_send_tokens_to_canister)); + + let b_block = switch(b_wallet_send_tokens_to_canister){ + case(#ok(ablock)){ + Nat64.toNat(ablock); + }; + case(#err(other)){ + D.print("ledger didnt work"); + return #fail("ledger didnt work"); + }; + }; + + //ESC0002 + D.print("Sending real escrow now"); + let b_wallet_try_escrow_specific_staged = await 
b_wallet.try_escrow_specific_staged(Principal.fromActor(this), Principal.fromActor(canister), Principal.fromActor(dfx), ?b_block, 1 * 10 ** 8, "3", null, ?#ic({ + canister= Principal.fromActor(dfx); + standard=#Ledger; + decimals = 8; + symbol = "LDG"; + fee = 200000;}), null); + + // + D.print("try escrow specific stage"); + D.print(debug_show(b_wallet_try_escrow_specific_staged)); + + + //MKT0010 + D.print("specific market"); + let specific_market = await canister.market_transfer_nft_origyn({ + token_id = "3"; + sales_config = + { + escrow_receipt = ?{ + seller = #principal(Principal.fromActor(this)); + buyer = #principal(Principal.fromActor(b_wallet)); + token_id = "3"; + token = #ic({ + canister = Principal.fromActor(dfx); + standard = #Ledger; + decimals = 8; + symbol = "LDG"; + fee = 200000; + }); + amount = 100_000_000; + }; + pricing = #instant; + broker_id = null; + }; + + }); + + D.print(debug_show(specific_market)); + + //test balances + + let suite = S.suite("test market Nft", [ + S.test("fail if escrow is double processed", switch(a_wallet_try_escrow_general_staged_retry){case(#ok(res)){"unexpected success"};case(#err(err)){ + if(err.number ==3003){ // + "correct number" + } else{ + "wrong error " # debug_show(err.number); + }};}, M.equals(T.text("correct number"))), //ESC0005 + S.test("fail if mint is called on a minted item", switch(mint_attempt){case(#ok(res)){"unexpected success"};case(#err(err)){ + if(err.number == 10){ // + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), //MINT0026 + + S.test("item is minted now", switch(test_metadata){case(#ok(res)){ + if(Metadata.is_minted(res.metadata) == true){ + "was minted" + } else { + "was not minted" + }};case(#err(err)){"unexpected error: " # err.flag_point};}, M.equals(T.text("was minted"))), //MKT0013 + S.test("item is owned by correct owner after minting", switch(test_metadata){case(#ok(res)){ + if(Types.account_eq(switch(Metadata.get_nft_owner(res.metadata)){ + case(#err(err)){#account_id("invalid")}; + case(#ok(val)){D.print(debug_show(val));val}; + }, #principal(Principal.fromActor(a_wallet)) ) == true){ + "was transferred" + } else { + D.print("awallet"); + D.print(debug_show(Principal.fromActor(a_wallet))); + "was not transferred" + }};case(#err(err)){"unexpected error: " # err.flag_point};}, M.equals(T.text("was transferred"))), //MKT0011 + + S.test("fail if escrow already spent", switch(blind_market2){case(#ok(res)){"unexpected success"};case(#err(err)){ + if(err.number ==3000){ // + "correct number" + } else{ + "wrong error " # debug_show(err.number); + }};}, M.equals(T.text("correct number"))), //ESC0009 + + S.test("fail if escrowing for a non-existent deposit", switch(a_wallet_try_escrow_general_fake){case(#ok(res)){"unexpected success"};case(#err(err)){ + if(err.number == 3003){ // + "correct number" + } else{ + "wrong error " # debug_show(err.number); + }};}, M.equals(T.text("correct number"))), //ESC0006 + S.test("fail if escrowing for an existing deposit but fake amount", switch(a_wallet_try_escrow_general_fake_amount){case(#ok(res)){"unexpected success"};case(#err(err)){ + if(err.number == 3003){ // + "correct number" + } else{ + "wrong error " # debug_show(err.number); + }};}, M.equals(T.text("correct number"))), //ESC0011 + S.test("can escrow for general unminted item", switch(a_wallet_try_escrow_general_staged){case(#ok(res)){ + D.print("an amount for escrow"); + D.print(debug_show(res.receipt)); + if(res.receipt.amount == 1*10**8){ + "was escrowed" + 
} else { + "was not escrowed" + }};case(#err(err)){"unexpected error: " # err.flag_point};}, M.equals(T.text("was escrowed"))), //ESC0002 + S.test("can escrow for specific item", switch(b_wallet_try_escrow_specific_staged){case(#ok(res)){ + if(res.receipt.amount == 1*10**8){ + "was escrowed" + } else { + "was not escrowed" + }};case(#err(err)){"unexpected error: " # err.flag_point};}, M.equals(T.text("was escrowed"))), //ESC0001 + S.test("owner can sell specific NFT - produces sale_id", switch(specific_market){case(#ok(res)){ + if(res.token_id == "3"){ + "found tx record" + } else { + D.print(debug_show(res)); + "no sales id " + }};case(#err(err)){"unexpected error: " # err.flag_point};}, M.equals(T.text("found tx record"))), //MKT0010 + S.test("escrow balance is shown", switch(a_balance){case(#ok(res)){ + D.print("this should be failing for now because it cmpares dip20 but we did ledger"); + D.print(debug_show(res)); + D.print(debug_show(#principal(Principal.fromActor(canister)))); + D.print(debug_show(#principal(Principal.fromActor(a_wallet)))); + D.print(debug_show(Principal.fromActor(dfx))); + if(Types.account_eq(res.escrow[0].seller, #principal(Principal.fromActor(canister))) and + Types.account_eq(res.escrow[0].buyer, #principal(Principal.fromActor(a_wallet))) and + res.escrow[0].token_id == "" and + Types.token_eq(res.escrow[0].token, #ic({ + canister = Principal.fromActor(dfx); + standard = #Ledger; + decimals = 8; + symbol = "LDG"; + fee = 200000; + })) + ){ + "found escrow record" + } else { + D.print(debug_show(res)); + "didnt find record " + }};case(#err(err)){"unexpected error: " # err.flag_point};}, M.equals(T.text("found escrow record"))), //BAL0001 + S.test("escrow balance is removed", switch(a_balance2){case(#ok(res)){ + D.print(debug_show(res)); + if(res.escrow.size() == 0 ){ + "no escrow record" + } else { + D.print(debug_show(res)); + "found record " + }};case(#err(err)){"unexpected error: " # err.flag_point};}, M.equals(T.text("no escrow record"))), //BAL0005 + S.test("sale balance is shown", switch(a_balance){case(#ok(res)){ + D.print(debug_show(res)); + D.print(debug_show(#principal(Principal.fromActor(canister)))); + D.print(debug_show(#principal(Principal.fromActor(a_wallet)))); + D.print(debug_show(Principal.fromActor(dfx))); + if(Types.account_eq(res.escrow[0].buyer, #principal(Principal.fromActor(a_wallet))) and + Types.account_eq(res.escrow[0].seller, #principal(Principal.fromActor(canister))) and + res.escrow[0].token_id == "" and + Types.token_eq(res.escrow[0].token, #ic({ + canister = Principal.fromActor(dfx); + standard = #Ledger; + decimals = 8; + symbol = "LDG"; + fee = 200000; + })) and + res.escrow[0].amount == 1*10**8 + ){ + "found sale record" + } else { + D.print(debug_show(res)); + "didnt find record " + }};case(#err(err)){"unexpected error: " # err.flag_point};}, M.equals(T.text("found sale record"))), //BAL0003 + + ]); + + S.run(suite); + + return #success; + }; + + + public shared func testOffers() : async {#success; #fail : Text} { + D.print("running testOffers"); + + let a_wallet = await TestWalletDef.test_wallet(); + let b_wallet = await TestWalletDef.test_wallet(); + let c_wallet = await TestWalletDef.test_wallet(); + + let newPrincipal = await g_canister_factory.create({ + owner = Principal.fromActor(this); + storage_space = null; + }); + + let canister : Types.Service = actor(Principal.toText(newPrincipal)); + + D.print("calling stage"); + + let standardStage = await utils.buildStandardNFT("1", canister, Principal.fromActor(canister), 1024, 
false);
+        let mint_attempt = await canister.mint_nft_origyn("1", #principal(Principal.fromActor(c_wallet))); //mint to c_wallet
+
+        D.print("finished stage");
+        D.print(debug_show(standardStage.0));
+        D.print(debug_show(mint_attempt));
+
+        //ESC0002. try to escrow for the canister; should succeed
+        //fund a_wallet
+        D.print("funding result start a_wallet");
+        let dfx : DFXTypes.Service = actor(Principal.toText(dfx_ledger));
+
+        let funding_result = await dfx.transfer({
+            to = Blob.fromArray(AccountIdentifier.addHash(AccountIdentifier.fromPrincipal(Principal.fromActor(a_wallet), null)));
+            fee = {e8s = 200_000};
+            memo = 1;
+            from_subaccount = null;
+            created_at_time = null;
+            amount = {e8s = 100 * 10 ** 8};});
+        D.print("funding result end");
+        D.print(debug_show(funding_result));
+
+        //send a payment to the ledger
+        D.print("sending tokens to canisters");
+        let a_ledger_balance_before_escrow = await a_wallet.ledger_balance(Principal.fromActor(dfx), Principal.fromActor(a_wallet));
+        D.print("the a ledger balance" # debug_show(a_ledger_balance_before_escrow));
+
+        let a_wallet_send_tokens_to_canister = await a_wallet.send_ledger_payment(Principal.fromActor(dfx), (1 * 10 ** 8) + 200000, Principal.fromActor(canister));
+
+        D.print("send to canister a");
+        D.print(debug_show(a_wallet_send_tokens_to_canister));
+
+        ////ESC0001
+
+        D.print("Sending real escrow now");
+        let a_wallet_try_escrow_specific_staged = await a_wallet.try_escrow_specific_staged(Principal.fromActor(c_wallet), Principal.fromActor(canister), Principal.fromActor(dfx), null, 1 * 10 ** 8, "1", null, ?#ic({
+            canister = Principal.fromActor(dfx);
+            standard = #Ledger;
+            decimals = 8;
+            symbol = "LDG";
+            fee = 200000;}), null);
+
+        D.print("try escrow specific stage");
+        D.print(debug_show(a_wallet_try_escrow_specific_staged));
+
+        //check balance and make sure we see the escrow BAL0002
+        let a_balance = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(a_wallet)));
+        let a_ledger_balance_after_escrow = await a_wallet.ledger_balance(Principal.fromActor(dfx), Principal.fromActor(a_wallet));
+        let c_balance = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(c_wallet)));
+
+        D.print("the a balance" # debug_show(a_balance));
+        D.print("the a ledger balance" # debug_show(a_ledger_balance_after_escrow));
+        D.print("the c balance" # debug_show(c_balance));
+
+        //have b try to reject the escrow ... should fail
+
+        //canister should have an offer
+        let c_wallet_balance = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(c_wallet)));
+
+        let reject_wrong_seller = await b_wallet.try_escrow_reject(
+            Principal.fromActor(canister),
+            Principal.fromActor(a_wallet),
+            Principal.fromActor(dfx),
+            Principal.fromActor(c_wallet),
+            "1",
+            null
+        );
+
+        D.print("reject_wrong_seller" # debug_show(reject_wrong_seller));
+
+        //MKT0014 todo: check the transaction record and confirm the genesis record
+
+        //BAL0005
+        let a_balance2 = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(a_wallet)));
+        let a_ledger_balance2 = await a_wallet.ledger_balance(Principal.fromActor(dfx), Principal.fromActor(a_wallet));
+
+        D.print("the a balance 2 " # debug_show(a_balance2));
+        D.print("the a ledger balance 2" # debug_show(a_ledger_balance2));
+        D.print("c_balance" # debug_show(c_balance));
+
+        //BAL0003
+        let c_wallet_balance_2 = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(c_wallet)));
+
+        //have the owner reject the offer
+
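+        // Expected ledger accounting for the reject flow below (a sketch based on the
+        // assertions at the end of this test, assuming the 200_000 e8s ledger fee used
+        // throughout these tests):
+        //   a_ledger_balance_before_escrow
+        //     - 200_000   (fee on the deposit transfer to the canister)
+        //     - 200_000   (fee taken when the canister claims the escrow)
+        //     - 200_000   (fee on the refund transfer back to a_wallet)
+        //   == a_ledger_balance3 after the owner's successful reject.
+        // The failed reject by b_wallet above should leave the escrow and a_wallet's
+        // ledger balance unchanged (a_ledger_balance2 == a_ledger_balance_after_escrow).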
+ let reject_right_seller = await c_wallet.try_escrow_reject( + Principal.fromActor(canister), + Principal.fromActor(a_wallet), + Principal.fromActor(dfx), + Principal.fromActor(c_wallet), + "1", + null + ); + + D.print("reject_right_seller" # debug_show(reject_right_seller)); + + + let a_balance3 = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(a_wallet))); + + + let a_ledger_balance3 = await a_wallet.ledger_balance(Principal.fromActor(dfx), Principal.fromActor(a_wallet)); + let c_balance2 = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(c_wallet))); + + D.print("the a balance 3 " # debug_show(a_balance3)); + D.print("the a balance 2 " # debug_show(c_balance2)); + D.print("the a ledger balance 3 " # debug_show(a_ledger_balance3)); + + //refresh removes the offer + let c_refresh = await c_wallet.try_offer_refresh(Principal.fromActor(canister)); + + D.print("c_refresh3 " # debug_show(c_refresh)); + + let c_balance3 = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(c_wallet))); + + D.print("c_balance3 " # debug_show(c_balance3)); + //test balances + + let suite = S.suite("test market Nft", [ + S.test("fail if b can reject", switch(reject_wrong_seller){case(#ok(res)){"unexpected success"};case(#err(err)){ + if(err.number == 2000){ // + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), //MINT0026 + + S.test("c has offer", switch(c_balance){case(#ok(res)){ + D.print("testing sale balance 2"); + D.print(debug_show(res)); + + + if(res.offers.size() == 0){ + "found empty record" + } else { + D.print(debug_show(res)); + "found a record" + }};case(#err(err)){"unexpected error: " # err.flag_point};}, M.equals(T.text("found a record"))), //todo: NFT-118 + + S.test("does not gets money back after wrong reject", if(a_ledger_balance_after_escrow.e8s == a_ledger_balance2.e8s){ + "correct amount"; + } else { + "wrong amount " # Nat.toText(Nat64.toNat(a_ledger_balance_before_escrow.e8s)) # " " # Nat.toText(Nat64.toNat(a_ledger_balance3.e8s)); + } + , M.equals(T.text("correct amount"))), + S.test("a gets money back after reject", if(a_ledger_balance_before_escrow.e8s - 200000 - 200000 - 200000 == a_ledger_balance3.e8s){ //original balance = should equal the refund + fee for depoist + fee for claim + fee to send back + "correct amount"; + } else { + "wrong amount " # Nat.toText(Nat64.toNat(a_ledger_balance_before_escrow.e8s)) # " " # Nat.toText(Nat64.toNat(a_ledger_balance3.e8s)); + } + , M.equals(T.text("correct amount"))), + + S.test("c has no offer", switch(c_balance3){case(#ok(res)){ + D.print("testing offer balance 3"); + D.print(debug_show(res)); + + + if(res.offers.size() == 0){ + "found empty record" + } else { + D.print(debug_show(res)); + "found a record " + }};case(#err(err)){"unexpected error: " # err.flag_point};}, M.equals(T.text("found empty record"))), + S.test("a has no escrow after", switch(a_balance3){case(#ok(res)){ + D.print("testing sale balance 2"); + D.print(debug_show(res)); + + + if(res.offers.size() == 0){ + "found empty record" + } else { + D.print(debug_show(res)); + "found a record " + }};case(#err(err)){"unexpected error: " # err.flag_point};}, M.equals(T.text("found empty record"))), + + + + ]); + + S.run(suite); + + return #success; + }; + +} \ No newline at end of file diff --git a/src/tests/test_runner_nft_2.mo b/src/tests/test_runner_nft_2.mo new file mode 100644 index 0000000..f4e73ad --- /dev/null +++ b/src/tests/test_runner_nft_2.mo @@ -0,0 +1,1042 @@ 
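+// Test runner for the Origyn NFT reference canister (second batch).
+// test() wires up four suites against canisters built by the supplied canister and
+// storage factories: testMint (MINT0001/0021/0022/0024), testStage (staging metadata
+// and library chunks, MINT0004 through MINT0019), testOwnerAndManager (collection
+// owner and manager queries), and testBuyItNow (an auction with a buy-now price
+// settled against the test ledgers passed in as dfx_ledger and dfx_ledger2).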
+import AccountIdentifier "mo:principalmo/AccountIdentifier"; +import C "mo:matchers/Canister"; +import Conversion "mo:candy_0_1_10/conversion"; +import DFXTypes "../origyn_nft_reference/dfxtypes"; +import D "mo:base/Debug"; +import Blob "mo:base/Blob"; +import M "mo:matchers/Matchers"; +import NFTUtils "../origyn_nft_reference/utils"; +import Metadata "../origyn_nft_reference/metadata"; +import Nat64 "mo:base/Nat64"; +import Option "mo:base/Option"; +import Principal "mo:base/Principal"; +import Properties "mo:candy_0_1_10/properties"; +import Result "mo:base/Result"; +import Nat "mo:base/Nat"; +import S "mo:matchers/Suite"; +import T "mo:matchers/Testable"; +import TestWalletDef "test_wallet"; +import Time "mo:base/Time"; +import Types "../origyn_nft_reference/types"; +import utils "test_utils"; + + +shared (deployer) actor class test_runner(dfx_ledger: Principal, dfx_ledger2: Principal) = this { + let it = C.Tester({ batchSize = 8 }); + + + private var DAY_LENGTH = 60 * 60 * 24 * 10 ** 9; + private var ledger_fee = 200_000; + + private func get_time() : Int{ + return Time.now(); + }; + + private type canister_factory_actor = actor { + create : ({owner: Principal; storage_space: ?Nat}) -> async Principal; + }; + private type storage_factory_actor = actor { + create : ({owner: Principal; storage_space: ?Nat}) -> async Principal; + }; + + private var g_canister_factory : canister_factory_actor = actor(Principal.toText(Principal.fromBlob("\04"))); + private var g_storage_factory: storage_factory_actor = actor(Principal.toText(Principal.fromBlob("\04"))); + + + + + public shared func test(canister_factory : Principal, storage_factory: Principal) : async {#success; #fail : Text} { + + //let Instant_Test = await Instant.test_runner_instant_transfer(); + + g_canister_factory := actor(Principal.toText(canister_factory)); + g_storage_factory := actor(Principal.toText(storage_factory)); + + let suite = S.suite("test nft", [ + S.test("testMint", switch(await testMint()){case(#success){true};case(_){false};}, M.equals(T.bool(true))), + S.test("testStage", switch(await testStage()){case(#success){true};case(_){false};}, M.equals(T.bool(true))), + S.test("testOwnerAndManager", switch(await testOwnerAndManager()){case(#success){true};case(_){false};}, M.equals(T.bool(true))), + S.test("testBuyItNow", switch(await testBuyItNow()){case(#success){true};case(_){false};}, M.equals(T.bool(true))), + ]); + S.run(suite); + + return #success; + }; + + // MINT0002 + // MINT0003 + public shared func testOwnerAndManager() : async {#success; #fail : Text} { + D.print("running testOwner"); + + let owner = Principal.toText(Principal.fromActor(this)); + + let newPrincipal = await g_canister_factory.create({ + owner = Principal.fromActor(this); + storage_space = null; + }); + + let canister : Types.Service = actor(Principal.toText(newPrincipal)); + + let suite = S.suite("test owner and manager", [ + + S.test("owner is found", ( + switch(await canister.collection_nft_origyn(null)){ + case(#err(err)){ + "unexpected error" # debug_show(err); + }; + case(#ok(res)){ + switch(res.owner){ + case(null){"no owner"}; + case(?val){Principal.toText(val);}; + }; + + }; + } ), M.equals(T.text(owner))), + S.test("manager is found", switch(await canister.collection_nft_origyn(null)){ + case(#err(err)){ + 99999; + }; + case(#ok(res)){ + switch(res.managers){ + case(null){88888}; + case(?val){val.size()}; + }; + + }; + } , M.equals(T.nat(0))), + ]); + + S.run(suite); + + return #success; + }; + + //MINT0004, MINT0005, MINT0006, 
MINT0007, MINT0008, MINT0009, MINT0010, MINT0011, MINT0013, MINT0014, MINT0016, MINT0017, MINT0018 + public shared func testStage() : async {#success; #fail : Text} { + D.print("running teststage"); + + let a_wallet = await TestWalletDef.test_wallet(); + + let newPrincipal = await g_canister_factory.create({ + owner = Principal.fromActor(this); + storage_space = null; + }); + + let canister : Types.Service = actor(Principal.toText(newPrincipal)); + + //MINT0014 + let a_wallet_try_publish = await a_wallet.try_publish_meta(Principal.fromActor(canister)); + + D.print("calling stage"); + + //MINT0007, MINT0008 + let standardStage = await utils.buildStandardNFT("1", canister, Principal.fromActor(this), 1024, false); + D.print("finished stage"); + D.print(debug_show(standardStage.0)); + + let test_metadata = await canister.nft_origyn("1"); + + + //MINT0016 + let a_wallet_try_file_publish = await a_wallet.try_publish_chunk(Principal.fromActor(canister)); + + //MINT0018 + let a_wallet_try_get_nft = await a_wallet.try_get_nft(Principal.fromActor(canister), "1"); + + //MINT0025 + let a_wallet_try_get_bearer = await a_wallet.try_get_bearer(Principal.fromActor(canister)); + + + //MINT0009 + D.print("this one should have content"); + D.print(debug_show(Principal.fromActor(canister))); + let fileStage2 = await canister.stage_library_nft_origyn({ + token_id = "1" : Text; + library_id = "page" : Text; + filedata = #Empty; + chunk = 1; + content = Conversion.valueToBlob(#Text("nice to meet you")); + }); + + //MINT0019 - you can now upload here but must provide proper metadata and have storagebthis will fail with id not found + let fileStage3 = await canister.stage_library_nft_origyn({ + token_id = "1" : Text; + library_id = "1" : Text; + filedata = #Empty; + chunk = 1; + content = Conversion.valueToBlob(#Text("nice to meet you")); + }); + + D.print("trying to upload before meta" # debug_show(fileStage3)); + //MINT0010 + let fileStageResult = await canister.chunk_nft_origyn({token_id = "1"; library_id = "page"; chunk = ?0;}); + D.print(debug_show(fileStageResult)); + + let fileStageResult2 = await canister.chunk_nft_origyn({token_id = "1"; library_id = "page"; chunk = ?1;}); + D.print(debug_show(fileStageResult2)); + + + + let fileStageResultDenied = switch(await a_wallet.try_get_chunk(Principal.fromActor(canister),"1","page",0)){ + case(#ok(data)){ + "Should not have returned data"; + }; + case(#err(data)){ + "Proper Error occured"; + }; + }; + + D.print("filestage result finished"); + //MINT0004 + let fail_stage_because_id = await canister.stage_nft_origyn({metadata = #Class([ + + {name = "primary_asset"; value=#Text("page"); immutable= true}, + {name = "preview"; value=#Text("page"); immutable= true}, + {name = "experience"; value=#Text("page"); immutable= true}, + {name = "hidden"; value=#Text("page"); immutable= true}, + {name = "library"; value=#Array(#thawed([ + #Class([ + {name = "library_id"; value=#Text("page"); immutable= true}, + {name = "title"; value=#Text("page"); immutable= true}, + {name = "location_type"; value=#Text("canister"); immutable= true}, + {name = "location"; value=#Text("https://" # Principal.toText(Principal.fromActor(canister)) # ".raw.ic0.app/_/1/_/page"); immutable= true}, + {name = "content_type"; value=#Text("text/html; charset=UTF-8"); immutable= true}, + {name = "content_hash"; value=#Bytes(#frozen([0,0,0,0])); immutable= true}, + {name = "size"; value=#Nat(4); immutable= true}, + {name = "sort"; value=#Nat(0); immutable= true}, + ]) + ])); immutable= true}, + {name = 
"owner"; value=#Principal(Principal.fromActor(canister)); immutable= false} + ])}); + + D.print("fail_stage_because_id result finished"); + + //MINT0006 + let fail_stage_because_system = await canister.stage_nft_origyn({metadata = #Class([ + {name = "id"; value=#Text("2"); immutable= true}, + {name = "primary_asset"; value=#Text("page"); immutable= true}, + {name = "preview"; value=#Text("page"); immutable= true}, + {name = "experience"; value=#Text("page"); immutable= true}, + {name = "library"; value=#Array(#thawed([ + #Class([ + {name = "library_id"; value=#Text("page"); immutable= true}, + {name = "title"; value=#Text("page"); immutable= true}, + {name = "location_type"; value=#Text("canister"); immutable= true}, + {name = "location"; value=#Text("https://" # Principal.toText(Principal.fromActor(canister)) # ".raw.ic0.app/_/1/_/page"); immutable= true}, + {name = "content_type"; value=#Text("text/html; charset=UTF-8"); immutable= true}, + {name = "content_hash"; value=#Bytes(#frozen([0,0,0,0])); immutable= true}, + {name = "size"; value=#Nat(4); immutable= true}, + {name = "sort"; value=#Nat(0); immutable= true}, + ]) + ])); immutable= true}, + {name = "owner"; value=#Principal(Principal.fromActor(canister)); immutable= false}, + //below is what we are testing + {name = "__system"; value=#Class([ + {name = "status"; value=#Text("minted"); immutable=false;} + ]); immutable = false} + ])}); + + D.print("fail_stage_because_system result finished"); + + + //MINT0005 + let test_metadata_replace_command = await canister.stage_nft_origyn({metadata = #Class([ + {name = "id"; value=#Text("1"); immutable= true}, + {name = "primary_asset"; value=#Text("page2"); immutable= true}, + {name = "preview"; value=#Text("page"); immutable= true}, + {name = "experience"; value=#Text("page"); immutable= true}, + {name = "library"; value=#Array(#thawed([ + #Class([ + {name = "library_id"; value=#Text("page2"); immutable= true}, + {name = "title"; value=#Text("page"); immutable= true}, + {name = "location_type"; value=#Text("canister"); immutable= true}, + {name = "location"; value=#Text("https://" # Principal.toText(Principal.fromActor(canister)) # ".raw.ic0.app/_/1/_/page"); immutable= true}, + {name = "content_type"; value=#Text("text/html; charset=UTF-8"); immutable= true}, + {name = "content_hash"; value=#Bytes(#frozen([0,0,0,0])); immutable= true}, + {name = "size"; value=#Nat(4); immutable= true}, + {name = "sort"; value=#Nat(0); immutable= true}, + {name = "read"; value=#Text("public"); immutable= false}, + + ]) + ])); immutable= true}, + {name = "owner"; value=#Principal(Principal.fromActor(canister)); immutable= false} + ])}); + + D.print("result from trying to replace the nft was"); + D.print(debug_show(test_metadata_replace_command)); + + let test_metadata_replace = await canister.nft_origyn("1"); + + let suite = S.suite("test staged Nft", [ + + S.test("retult is id", switch(standardStage.0){case(#ok(res)){res};case(#err(err)){"error"};}, M.equals(T.text("1"))), //MINT0007 + S.test("fail if no id", switch(fail_stage_because_id){case(#ok(res)){res};case(#err(err)){"fail_expected"};}, M.equals(T.text("fail_expected"))), //MINT0004 + S.test("fail if __system", switch(fail_stage_because_system){case(#ok(res)){res};case(#err(err)){"fail_expected"};}, M.equals(T.text("fail_expected"))), //MINT0006 + S.test("file stage succeded", switch(standardStage.1){case(#ok(res)){Principal.toText(res)};case(#err(err)){"aaaaa-aa"}}, M.equals(T.text(Principal.toText(Principal.fromActor(canister))))), //MINT0008 + 
S.test("file stage query works", switch(fileStageResult){ + case(#ok(res)){ + switch(res){ + case(#remote(redirect)){"unexpected redirect"}; + case(#chunk(res)){ + + if((switch(res.current_chunk){case(?val){val};case(null){9999999}}) + 1 == res.total_chunks){ + "unexpectd eof chunks"; + } else { + Conversion.bytesToText(Blob.toArray(res.content)); + } + }; + }; + + }; + case(#err(err)){err.flag_point}; + }, M.equals(T.text("hello world"))), //MINT0006 + S.test("file stage query works", switch(fileStageResult2){ + case(#ok(res)){ + switch(res){ + case(#remote(redirect)){"unexpected redirect"}; + case(#chunk(res)){ + if((switch(res.current_chunk){case(?val){val};case(null){9999999}}) + 1 == res.total_chunks){ + Conversion.bytesToText(Blob.toArray(res.content)); + } else { + "unexpecte not eof"; + } + }; + }; + + }; + case(#err(err)){err.flag_point}; + }, M.equals(T.text("nice to meet you"))), //MINT0009 + S.test("file stage cannot be viewed by non owner", fileStageResultDenied, M.equals(T.text("Proper Error occured"))), //MINT0011 + S.test("file stage reports chunks", switch(fileStageResult2){ + case(#ok(res)){ + switch(res){ + case(#remote(redirect)){999999}; + case(#chunk(res)){ + res.total_chunks; + }; + }; + }; + case(#err(err)){999}; + }, M.equals(T.nat(2))), //MINT0013 + S.test("cant publish metadata for someone else", switch(a_wallet_try_publish){ + case(#ok(res)){ + "shoundnt be able to publish" + }; + case(#err(err)){ + D.print(debug_show(err)); + err.text; + }; + }, M.equals(T.text("unauthorized access"))), //MINT0014 + S.test("cant publish file for someone else", switch(a_wallet_try_file_publish){ + case(#ok(res)){ + "shoundnt be able to publish" + }; + case(#err(err)){ + D.print(debug_show(err)); + err.text; + }; + }, M.equals(T.text("unauthorized access"))), //MINT0016 + S.test("can see metadata after I stage", switch(test_metadata){ + case(#ok(res)){ + switch(res.metadata){ + case(#Class(data)){ + if(data.size() ==12){ //check if a top level element was added to the structure + "Ok"; + } else { + D.print("testing size"); + D.print(debug_show(test_metadata)); + D.print(debug_show(data)); + D.print(debug_show(data.size())); + "data elements don't match wanted 9 found " # debug_show(data.size()); + } + }; + case (_){ + "should have returned a class"; + }; + }; + }; + case(#err(err)){ + D.print("error stage"); + D.print(debug_show(err)); + "shoundnt have an error"; + }; + }, M.equals(T.text("Ok"))), //MINT0017 + S.test("can't see metadata after stage from wallet", switch(a_wallet_try_get_nft){ + case(#ok(res)){ + "shoundnt be able to get" + }; + case(#err(err)){ + D.print(debug_show(err)); + err.text; + }; + }, M.equals(T.text("Cannot find token."))), //MINT0018 + S.test("can't see bearer after stage from wallet", switch(a_wallet_try_get_bearer){ + case(#ok(res)){ + "shoundnt be able to get" + }; + case(#err(err)){ + D.print(debug_show(err)); + err.text; + }; + }, M.equals(T.text("Cannot find token."))), //MINT0025 + S.test("can update metadata", switch(test_metadata_replace){ + case(#ok(res)){ + switch(Properties.getClassProperty(res.metadata,"primary_asset")){ + case(null){ + "should have this property"; + }; + case(?val){ + Conversion.valueToText(val.value); + } + } + }; + case(#err(err)){ + D.print("err for test metadata"); + D.print(debug_show(err)); + "shoundnt error" + }; + }, M.equals(T.text("page2"))), //MINT0005 + S.test("cant upload library_id that doesnt exist metadata", switch(fileStage3){ + case(#ok(res)){ + "that should not have worked because the library id wasnt 
planed and doesnt have storage" + }; + case(#err(err)){ + if(err.number == 1001){ + "correct number" + } else{ + "wrong error" # debug_show(err.number); + } + }; + }, M.equals(T.text("correct number"))), //MINT0019 + ]); + + S.run(suite); + + return #success; + + + + }; + + + + //MINT0021, MINT0001, MINT0024, MINT0022 + public shared func testMint() : async {#success; #fail : Text} { + D.print("running testmint"); + + debug{ D.print(debug_show(Principal.fromActor(this)))}; + + let a_wallet = await TestWalletDef.test_wallet(); + + let newPrincipal = await g_canister_factory.create({ + owner = Principal.fromActor(this); + storage_space = null; + }); + + let canister : Types.Service = actor(Principal.toText(newPrincipal)); + + D.print("a mint canister"); + D.print(debug_show(Principal.fromActor(canister))); + + let standardStage = await utils.buildStandardNFT("1", canister, Principal.fromActor(this), 1024, false); + + let fileStage2 = await canister.stage_library_nft_origyn({ + token_id = "1" : Text; + library_id = "page" : Text; + filedata = #Empty; + chunk = 1; + content = Conversion.valueToBlob(#Text("nice to meet you")); + }); + + D.print("after file stage"); + + //MINT0021 + let a_wallet_try_mint = await a_wallet.try_mint(Principal.fromActor(canister)); + D.print("a wallet try mint"); + + //Mint0001 + let mint_attempt = await canister.mint_nft_origyn("1", #principal(Principal.fromActor(a_wallet))); + + D.print("mint attempt"); + + //MINT0024 + let bearer_attempt = await canister.bearer_nft_origyn("1"); + + D.print("berer attempt"); + + //MINT0022 + let view_after_mint_attempt = await canister.nft_origyn("1"); + + D.print("view after mint"); + + let suite = S.suite("test staged Nft", [ + + S.test("fail if non owner mints", switch(a_wallet_try_mint){case(#ok(res)){"unexpected success"};case(#err(err)){ + if(err.number == 2000){ + "correct number" + } else{ + "wrong error"; + }};}, M.equals(T.text("correct number"))), //MINT0021 + S.test("owner can mint", switch(mint_attempt){case(#ok(res)){res};case(#err(err)){"unexpected error: " # err.flag_point};}, M.equals(T.text("1"))), //MINT0001 + S.test("user can see nft after mint", switch(view_after_mint_attempt){case(#ok(res)){"worked"};case(#err(err)){"unexpected error: " # err.flag_point};}, M.equals(T.text("worked"))), //MINT0022 + S.test("creator can assign owner on mint", switch(bearer_attempt){case(#ok(res)){ + switch(res){ + case(#principal(res)){Principal.toText(res)}; + case(_){"unexpected account type" # debug_show(res)}; + };} + ;case(#err(err)){"unexpected error: " # err.flag_point};}, M.equals(T.text(Principal.toText(Principal.fromActor(a_wallet))))), //MINT0024 + + ]); + + S.run(suite); + + return #success; + + + + }; + + public shared func testBuyItNow() : async {#success; #fail : Text} { + D.print("running testBuyItNow"); + + let dfx : DFXTypes.Service = actor(Principal.toText(dfx_ledger)); + + let dfx2 : DFXTypes.Service = actor(Principal.toText(dfx_ledger2)); + + + let a_wallet = await TestWalletDef.test_wallet(); + let b_wallet = await TestWalletDef.test_wallet(); + let c_wallet = await TestWalletDef.test_wallet(); + + let funding_result_a = await dfx.transfer({ + to = Blob.fromArray(AccountIdentifier.addHash(AccountIdentifier.fromPrincipal(Principal.fromActor(a_wallet), null))); + fee = {e8s = 200_000}; + memo = 1; + from_subaccount = null; + created_at_time = null; + amount = {e8s = 1000 * 10 ** 8};}); + + let funding_result_b = await dfx.transfer({ + to = 
Blob.fromArray(AccountIdentifier.addHash(AccountIdentifier.fromPrincipal(Principal.fromActor(b_wallet), null))); + fee = {e8s = 200_000}; + memo = 1; + from_subaccount = null; + created_at_time = null; + amount = {e8s = 1000 * 10 ** 8};}); + let funding_result_b2 = await dfx2.transfer({ + to = Blob.fromArray(AccountIdentifier.addHash(AccountIdentifier.fromPrincipal(Principal.fromActor(b_wallet), null))); + fee = {e8s = 200_000}; + memo = 1; + from_subaccount = null; + created_at_time = null; + amount = {e8s = 1000 * 10 ** 8};}); + let funding_result_c = await dfx.transfer({ + to = Blob.fromArray(AccountIdentifier.addHash(AccountIdentifier.fromPrincipal(Principal.fromActor(c_wallet), null))); + fee = {e8s = 200_000}; + memo = 1; + from_subaccount = null; + created_at_time = null; + amount = {e8s = 1000 * 10 ** 8};}); + + let newPrincipal = await g_canister_factory.create({ + owner = Principal.fromActor(this); + storage_space = null; + }); + + let canister : Types.Service = actor(Principal.toText(newPrincipal)); + + let mode = canister.__set_time_mode(#test); + let atime = canister.__advance_time(Time.now()); + + let standardStage = await utils.buildStandardNFT("1", canister, Principal.fromActor(this), 1024, false); //for auctioning a minted item + let standardStage2 = await utils.buildStandardNFT("2", canister, Principal.fromActor(this), 1024, false); //for auctioning an unminted item + + D.print("Minting"); + let mint_attempt = await canister.mint_nft_origyn("1", #principal(Principal.fromActor(this))); //mint to the test account + let mint_attempt2 = await canister.mint_nft_origyn("2", #principal(Principal.fromActor(this))); //mint to the test account + + + D.print("start auction owner"); + //start an auction by owner + let start_auction_attempt_owner = await canister.market_transfer_nft_origyn({token_id = "1"; + sales_config = { + escrow_receipt = null; + broker_id = null; + pricing = #auction{ + reserve = ?(10 * 10 ** 8); + token = #ic({ + canister = Principal.fromActor(dfx); + standard = #Ledger; + decimals = 8; + symbol = "LDG"; + fee = 200000; + }); + buy_now = ?(10 * 10 ** 8); + start_price = (10 * 10 ** 8); + start_date = 0; + ending = #date(get_time() + DAY_LENGTH); + min_increase = #amount(10*10**8); + allow_list = ?[Principal.fromActor(a_wallet), Principal.fromActor(b_wallet)]; + }; + }; } ); + + D.print("get sale id"); + let current_sales_id = switch(start_auction_attempt_owner){ + case(#ok(val)){ + switch(val.txn_type){ + case(#sale_opened(sale_data)){ + sale_data.sale_id; + }; + case(_){ + D.print("Didn't find expected sale_opened"); + return #fail("Didn't find expected sale_opened"); + } + }; + + }; + case(#err(item)){ + D.print("error with auction start"); + return #fail("error with auction start"); + }; + }; + + + //fund c to send an invalid bid + let c_wallet_send_tokens_to_canister = await c_wallet.send_ledger_payment(Principal.fromActor(dfx), (10 * 10 ** 8 ) + 200000, Principal.fromActor(canister)); + + let block_c = switch(c_wallet_send_tokens_to_canister){ + case(#ok(ablock)){ + ablock; + }; + case(#err(other)){ + D.print("ledger didnt work"); + return #fail("ledger didnt work"); + }; + }; + + D.print("Sending real escrow now"); + let c_wallet_try_escrow_general_staged = await c_wallet.try_escrow_specific_staged(Principal.fromActor(this), Principal.fromActor(canister), Principal.fromActor(dfx), null, 10 * 10 ** 8, "1", ?current_sales_id, null, null); + + //place a bid by an invalid user + let c_wallet_try_bid_valid = await 
c_wallet.try_bid(Principal.fromActor(canister), Principal.fromActor(this), Principal.fromActor(dfx), 10*10**8, "1", current_sales_id, null); + + + let c_balance_after_bad_bid = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(c_wallet))); + + + + //place escrow + D.print("sending tokens to canisters"); + let a_wallet_send_tokens_to_canister = await a_wallet.send_ledger_payment(Principal.fromActor(dfx), (10 * 10 ** 8) + 200000, Principal.fromActor(canister)); + + let block = switch(a_wallet_send_tokens_to_canister){ + case(#ok(ablock)){ + ablock; + }; + case(#err(other)){ + D.print("ledger didnt work"); + return #fail("ledger didnt work"); + }; + }; + + D.print("Sending real escrow now"); + let a_wallet_try_escrow_general_staged = await a_wallet.try_escrow_specific_staged(Principal.fromActor(this), Principal.fromActor(canister), Principal.fromActor(dfx), null, 10 * 10 ** 8, "1", ?current_sales_id, null, null); + + D.print("should be done now"); + let a_balance_before_first = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(a_wallet))); + + D.print("the balance before first is"); + D.print(debug_show(a_balance_before_first)); + + //place a bid below start price + + let a_wallet_try_bid_below_start = await a_wallet.try_bid(Principal.fromActor(canister), Principal.fromActor(this), Principal.fromActor(dfx), 1*10**7, "1", current_sales_id, null); + + //todo: bid should be refunded + + let a_balance_after_bad_bid = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(a_wallet))); + D.print("a balance " # debug_show(a_balance_after_bad_bid)); + + //restake after refund + D.print("sending tokens to canisters 3"); + let a_wallet_send_tokens_to_canister2b = await a_wallet.send_ledger_payment(Principal.fromActor(dfx), ((10 * 10 ** 8) + 1) + 200000, Principal.fromActor(canister)); + + D.print("sending tokens after refund" # debug_show(a_wallet_send_tokens_to_canister2b)); + let block2 = switch(a_wallet_send_tokens_to_canister2b){ + case(#ok(ablock)){ + ablock; + }; + case(#err(other)){ + D.print("ledger didnt work"); + return #fail("ledger didnt work"); + }; + }; + + D.print("Sending real escrow now 2"); + let a_wallet_try_escrow_general_staged2b = await a_wallet.try_escrow_specific_staged(Principal.fromActor(this), Principal.fromActor(canister), Principal.fromActor(dfx), null, (10 * 10 ** 8) + 1, "1", ?current_sales_id, null, null); + + + let a_balance_after_bad_bid2 = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(a_wallet))); + D.print("a balance 2 " # debug_show(a_balance_after_bad_bid2)); + + //try a bid in th wrong currency + //place escrow + D.print("sending tokens to canisters"); + let b_wallet_send_tokens_to_canister = await b_wallet.send_ledger_payment(Principal.fromActor(dfx2), (10 * 10 ** 8) + 200000, Principal.fromActor(canister)); + + let block2b = switch(b_wallet_send_tokens_to_canister){ + case(#ok(ablock)){ + ablock; + }; + case(#err(other)){ + D.print("ledger didnt work"); + return #fail("ledger didnt work"); + }; + }; + + D.print("Sending escrow for wrong currency escrow now"); + let b_wallet_try_escrow_wrong_currency = await b_wallet.try_escrow_specific_staged(Principal.fromActor(this), Principal.fromActor(canister), Principal.fromActor(dfx2), null, 10 * 10 ** 8, "1", ?current_sales_id, null, null); + + + //place a bid wiht wrong asset MKT0023 + let b_wallet_try_bid_wrong_asset = await b_wallet.try_bid(Principal.fromActor(canister), Principal.fromActor(this), Principal.fromActor(dfx2), 10*10**8, "1", 
current_sales_id, null); + + //try starting again//should fail MKT0018 + let end_date = get_time() + DAY_LENGTH; + D.print("end date is "); + D.print(debug_show(end_date)); + //todo: write test + + + //place a valid bid MKT0027 + let a_wallet_try_bid_valid = await a_wallet.try_bid(Principal.fromActor(canister), Principal.fromActor(this), Principal.fromActor(dfx), (10*10**8) + 1, "1", current_sales_id, null); + + + let a_balance_after_bad_bid4 = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(a_wallet))); + D.print("a balance 4 " # debug_show(a_balance_after_bad_bid2)); + + //check transaction log for bid MKT0033, TRX0005 + let a_history_1 = await canister.history_nft_origyn("1", null, null); //gets all history + + //check transaction log for bid MKT0033, TRX0005 + let a_balance = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(a_wallet))); //gets all history + + D.print("withdraw during bid"); + //todo: attempt to withdraw escrow but it should be gone + let a_withdraw_during_bid = await a_wallet.try_escrow_withdraw(Principal.fromActor(canister), Principal.fromActor(a_wallet), Principal.fromActor(dfx), Principal.fromActor(this), "1", 1 * 10 ** 8, null); + + D.print("passed this"); + //place escrow b + let new_bid_val = 12*10**8; + + //try a bid in th wrong currency + //place escrow + D.print("sending tokens to canisters"); + let b_wallet_send_tokens_to_canister_correct_ledger = await b_wallet.send_ledger_payment(Principal.fromActor(dfx), new_bid_val + 200000, Principal.fromActor(canister)); + + D.print("did the payment? "); + D.print(debug_show(b_wallet_send_tokens_to_canister_correct_ledger)); + + let block2_b = switch(b_wallet_send_tokens_to_canister_correct_ledger){ + case(#ok(ablock)){ + ablock; + }; + case(#err(other)){ + D.print("ledger didnt work"); + return #fail("ledger didnt work"); + }; + }; + + D.print("Sending escrow for correct currency escrow now"); + let b_wallet_try_escrow_correct_currency = await b_wallet.try_escrow_specific_staged(Principal.fromActor(this), Principal.fromActor(canister), Principal.fromActor(dfx), null, new_bid_val, "1", ?current_sales_id, null, null); + + D.print("did the deposit work? 
"); + D.print(debug_show(b_wallet_try_escrow_correct_currency)); + + + let b_balance_after_deposit = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(b_wallet))); //gets all history + + + + //place a second bid - should fail since closed + let b_wallet_try_bid_valid = await b_wallet.try_bid(Principal.fromActor(canister), Principal.fromActor(this), Principal.fromActor(dfx), new_bid_val, "1", current_sales_id, null); + + + //advance time + let time_result = await canister.__advance_time(end_date + 1); + D.print("new time"); + D.print(debug_show(time_result)); + + + //end again, should fail + let end_again = await canister.sale_nft_origyn(#end_sale("1")); + D.print("end again"); + D.print(debug_show(end_again)); + + //try to withdraw winning bid NFT-110 + let a_withdraw_during_win = await a_wallet.try_escrow_withdraw(Principal.fromActor(canister), Principal.fromActor(a_wallet), Principal.fromActor(dfx), Principal.fromActor(this), "1", 101 * 10 ** 8, null); + + //NFT-94 check ownership + //check balance and make sure we see the nft + let a_balance_after_close = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(a_wallet))); + + // //MKT0029, MKT0036 + let a_sale_status_over_new_owner = await canister.nft_origyn("1"); + + //check transaction log for sale + let a_history_3 = await canister.history_nft_origyn("1", null, null); //gets all history + + let suite = S.suite("test staged Nft", [ + + S.test("test mint attempt", switch(mint_attempt){case(#ok(res)){ + + "correct response"; + + };case(#err(err)){"unexpected error: " # err.flag_point};}, M.equals(T.text("correct response"))), + S.test("auction is started", switch(start_auction_attempt_owner){case(#ok(res)){ + switch(res.txn_type){ + case(#sale_opened(details)){ + "correct response"; + }; + case(_){ + "bad transaction type"; + }; + }; + };case(#err(err)){"unexpected error: " # err.flag_point};}, M.equals(T.text("correct response"))), //MKT0021 + S.test("fail if bid not on allow list", switch(c_wallet_try_bid_valid){case(#ok(res)){"unexpected success"};case(#err(err)){ + if(err.number == 2000){ + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), + + S.test("nft balance after sale to bad user is 0", switch(c_balance_after_bad_bid){case(#ok(res)){ + D.print("testing nft balance"); + D.print(debug_show(res)); + + + if(res.nfts.size() == 0){ + "found empty record" + } else { + D.print(debug_show(res)); + if(res.nfts[res.nfts.size()-1] == "1"){ + "found a record" + }else { + "didnt find record" + }; + + }; + };case(#err(err)){"unexpected error: " # err.flag_point};}, M.equals(T.text("found empty record"))), //todo: NFT-94 + + S.test("fail if bid too low", switch(a_wallet_try_bid_below_start){case(#ok(res)){"unexpected success"};case(#err(err)){ + if(err.number == 4004){ //below bid price + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), //MKT0023 + S.test("fail if wrong asset", switch(b_wallet_try_bid_wrong_asset){case(#ok(res)){"unexpected success"};case(#err(err)){ + if(err.number == 4002){ //wrong asset + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), //MKT0024 + S.test("bid is succesful", switch(a_wallet_try_bid_valid){case(#ok(res)){ + D.print("as bid"); + D.print(debug_show(a_wallet_try_bid_valid)); + switch(res.txn_type){ + case(#sale_ended(details)){ + if(Types.account_eq(details.buyer, #principal(Principal.fromActor(a_wallet))) 
and + details.amount == ((10*10**8) + 1) and + (switch(details.sale_id){case(null){"x"};case(?val){val}}) == current_sales_id and + Types.account_eq(details.seller, #principal(Principal.fromActor(this))) and + Types.token_eq(details.token, #ic({ + canister = (Principal.fromActor(dfx)); + standard = #Ledger; + decimals = 8; + symbol = "LDG"; + fee = 200000;}))){ + "correct response"; + } else { + "details didnt match" # debug_show(details); + }; + }; + case(_){ + D.print("bad transaction bid " # debug_show(res)); + "bad transaction bid"; + }; + }; + };case(#err(err)){"unexpected error: " # err.flag_point};}, M.equals(T.text("correct response"))), //MKT0027 + S.test("transaction history has the bid", switch(a_history_1){case(#ok(res)){ + + D.print("where ismy history"); + D.print(debug_show(a_history_1)); + switch(res[res.size()-1].txn_type){ + case(#sale_ended(details)){ + if(Types.account_eq(details.buyer, #principal(Principal.fromActor(a_wallet))) and + details.amount == ((10*10**8) + 1) and + details.sale_id == ?current_sales_id and + Types.token_eq(details.token, #ic({ + canister = (Principal.fromActor(dfx)); + standard = #Ledger; + decimals = 8; + symbol = "LDG"; + fee = 200000; + }))){ + "correct response"; + } else { + "details didnt match" # debug_show(details); + }; + }; + case(_){ + "bad history bid"; + }; + } + };case(#err(err)){"unexpected error: " # err.flag_point};}, M.equals(T.text("correct response"))), //TRX0005, MKT0033 + S.test("auction winner is the new owner", switch(a_sale_status_over_new_owner){case(#ok(res)){ + + let new_owner = switch(Metadata.get_nft_owner( + switch (a_sale_status_over_new_owner){ + case(#ok(item)){ + item.metadata; + }; + case(#err(err)){ + #Empty; + }; + })){ + case(#err(err)){ + #account_id("wrong"); + }; + case(#ok(val)){ + val; + }; + }; + D.print("new owner"); + D.print(debug_show(new_owner)); + D.print(debug_show(Principal.fromActor(a_wallet))); + if(Types.account_eq(new_owner, #principal(Principal.fromActor(a_wallet)))){ + "found correct owner" + } else { + D.print(debug_show(res)); + "didnt find record " + }};case(#err(err)){"unexpected error: " # err.flag_point};}, M.equals(T.text("found correct owner"))), //MKT0029 + S.test("current sale status is ended", switch(a_sale_status_over_new_owner){case(#ok(res)){ + D.print("a_sale_status_over_new_owner"); + D.print(debug_show(a_sale_status_over_new_owner)); + //MKT0036 sale should be over and there should be a record with status #ended + switch (a_sale_status_over_new_owner){ + case(#ok(res)){ + + switch(res.current_sale){ + case(null){ + "current sale improperly removed" + }; + case(?val){ + switch(val.sale_type){ + case(#auction(state)){ + D.print("state"); + D.print(debug_show(state)); + let current_status = switch(state.status){case(#closed){true;};case(_){false}}; + if(current_status == true and + val.sale_id == current_sales_id){ + "found closed sale"; + } else { + "didnt find closed sale"; + }; + + }; + + }; + }; + }; + + }; + case(#err(err)){ + "error getting"; + }; + }; + };case(#err(err)){"unexpected error: " # err.flag_point};}, M.equals(T.text("found closed sale"))), // MKT0036 + + + + S.test("transaction history have the transfer", + switch(a_history_3){ + case(#ok(res)){ + + + switch(res[res.size()-1].txn_type){ + case(#sale_ended(details)){ + if(Types.account_eq(details.buyer, #principal(Principal.fromActor(a_wallet))) and + details.amount == ((10*10**8) + 1) and + details.sale_id == ?current_sales_id and + Types.token_eq(details.token, #ic({ + canister = 
(Principal.fromActor(dfx)); + standard = #Ledger; + decimals = 8; + symbol = "LDG"; + fee = 200000;}))){ + "correct response"; + } else { + "details didnt match" # debug_show(details); + }; + }; + case(_){ + "bad history sale"; + }; + }; + + }; + case(#err(err)){"unexpected error: " # err.flag_point}; + }, M.equals(T.text("correct response"))), //todo: make a user story for adding a #sale_ended to the end of transaction log + S.test("fail if auction already over ", switch(end_again){case(#ok(res)){"unexpected success"};case(#err(err)){ + if(err.number == 2000){ //new owner so unauthorized + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), //todo: create user story for sale over + S.test("fail if escrow is for the current winning bid", switch(a_withdraw_during_bid){case(#ok(res)){"unexpected success"};case(#err(err)){ + if(err.number == 3000){ //no escrow found + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), // NFT-76 + + S.test("fail if escrow is for the winning bid a withdraw", switch(a_withdraw_during_win){case(#ok(res)){"unexpected success"};case(#err(err)){ + if(err.number == 3000){ //wont be able to find it because it has been zeroed out. + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), // NFT-110 + S.test("nft balance after sale", switch(a_balance_after_close){case(#ok(res)){ + D.print("testing nft balance"); + D.print(debug_show(res)); + + + if(res.nfts.size() == 0){ + "found empty record" + } else { + D.print(debug_show(res)); + if(res.nfts[res.nfts.size()-1] == "1"){ + "found a record" + }else { + "didnt find record" + }; + + }; + };case(#err(err)){"unexpected error: " # err.flag_point};}, M.equals(T.text("found a record"))), //todo: NFT-94 + + + + + ]); + + S.run(suite); + + return #success; + + + + }; + + + +} \ No newline at end of file diff --git a/src/tests/test_runner_sale.mo b/src/tests/test_runner_sale.mo new file mode 100644 index 0000000..95e07e2 --- /dev/null +++ b/src/tests/test_runner_sale.mo @@ -0,0 +1,3248 @@ +import AccountIdentifier "mo:principalmo/AccountIdentifier"; +import Blob "mo:base/Blob"; +import C "mo:matchers/Canister"; +import Conversion "mo:candy_0_1_10/conversion"; +import D "mo:base/Debug"; +import DFXTypes "../origyn_nft_reference/dfxtypes"; +import Error "mo:base/Error"; +import M "mo:matchers/Matchers"; +import Metadata "../origyn_nft_reference/metadata"; +import NFTUtils "../origyn_nft_reference/utils"; +import Nat "mo:base/Nat"; +import Nat64 "mo:base/Nat64"; +import Option "mo:base/Option"; +import Principal "mo:base/Principal"; +import Properties "mo:candy_0_1_10/properties"; +import Result "mo:base/Result"; +import S "mo:matchers/Suite"; +import Sales "../origyn_sale_reference/main"; +import SalesCanister "../origyn_sale_reference/main"; +import T "mo:matchers/Testable"; +import TestWalletDef "test_wallet"; +import Time "mo:base/Time"; +import Types "../origyn_nft_reference/types"; +import utils "test_utils"; +//import Instant "test_runner_instant_transfer"; + + +shared (deployer) actor class test_runner_sale(dfx_ledger: Principal, dfx_ledger2: Principal) = this { + + private type canister_factory = actor { + create : (Principal) -> async Principal; + }; + + let it = C.Tester({ batchSize = 8 }); + + + private var DAY_LENGTH = 60 * 60 * 24 * 10 ** 9; + private var dip20_fee = 200_000; + + private var dfx_token_spec = #ic({ + canister= dfx_ledger; + 
standard=#Ledger; + decimals = 8; + symbol = "LDG"; + fee = 200000;}); + + private func get_time() : Int{ + return Time.now(); + }; + + private type canister_factory_actor = actor { + create : ({owner: Principal; storage_space: ?Nat}) -> async Principal; + }; + private type storage_factory_actor = actor { + create : ({owner: Principal; storage_space: ?Nat}) -> async Principal; + }; + + private var g_canister_factory : canister_factory_actor = actor(Principal.toText(Principal.fromBlob("\04"))); + private var g_storage_factory: storage_factory_actor = actor(Principal.toText(Principal.fromBlob("\04"))); + + + + public shared func test(canister_factory : Principal, storage_factory: Principal) : async {#success; #fail : Text} { + + //let Instant_Test = await Instant.test_runner_instant_transfer(); + + g_canister_factory := actor(Principal.toText(canister_factory)); + g_storage_factory := actor(Principal.toText(storage_factory)); + D.print("in test"); + + let suite = S.suite("test nft", [ + S.test("testLoadNFTs", switch(await testLoadNFTS()){case(#success){true};case(_){false};}, M.equals(T.bool(true))), + S.test("testManagement", switch(await testManagement()){case(#success){true};case(_){false};}, M.equals(T.bool(true))), + S.test("testAllocation", switch(await testAllocation()){case(#success){true};case(_){false};}, M.equals(T.bool(true))), + S.test("testRedeemAllocation", switch(await testRedeemAllocation()){case(#success){true};case(_){false};}, M.equals(T.bool(true))), + //S.test("testRegistration", switch(await testRegistration()){case(#success){true};case(_){false};}, M.equals(T.bool(true))), + //S.test("testReservation", switch(await testReservation()){case(#success){true};case(_){false};}, M.equals(T.bool(true))), + //S.test("testReservationNoReg", switch(await testReservationNoReg()){case(#success){true};case(_){false};}, M.equals(T.bool(true))), + ]); + S.run(suite); + + return #success; + }; + + public shared func testReservation() : async {#success; #fail : Text} { + D.print("running testReservation"); + + let a_wallet = await TestWalletDef.test_wallet(); + let b_wallet = await TestWalletDef.test_wallet(); + let c_wallet = await TestWalletDef.test_wallet(); + let d_wallet = await TestWalletDef.test_wallet(); + let e_wallet = await TestWalletDef.test_wallet(); + let f_wallet = await TestWalletDef.test_wallet(); + + D.print("have wallets"); + + //fund wallets + + let dfx : DFXTypes.Service = actor(Principal.toText(dfx_ledger)); + let funding_result = await dfx.transfer({ + to = Blob.fromArray(AccountIdentifier.addHash(AccountIdentifier.fromPrincipal(Principal.fromActor(a_wallet), null))); + fee = {e8s = 200_000}; + memo = 1; + from_subaccount = null; + created_at_time = null; + amount = {e8s = 100 * 10 ** 8};}); + + let funding_result_2 = await dfx.transfer({ + to = Blob.fromArray(AccountIdentifier.addHash(AccountIdentifier.fromPrincipal(Principal.fromActor(b_wallet), null))); + fee = {e8s = 200_000}; + memo = 1; + from_subaccount = null; + created_at_time = null; + amount = {e8s = 100 * 10 ** 8};}); + + let funding_result_5 = await dfx.transfer({ + to = Blob.fromArray(AccountIdentifier.addHash(AccountIdentifier.fromPrincipal(Principal.fromActor(c_wallet), null))); + fee = {e8s = 200_000}; + memo = 1; + from_subaccount = null; + created_at_time = null; + amount = {e8s = 100 * 10 ** 8};}); + + let funding_result_3 = await dfx.transfer({ + to = Blob.fromArray(AccountIdentifier.addHash(AccountIdentifier.fromPrincipal(Principal.fromActor(d_wallet), null))); + fee = {e8s = 200_000}; + 
memo = 1; + from_subaccount = null; + created_at_time = null; + amount = {e8s = 100 * 10 ** 8};}); + + let funding_result_4 = await dfx.transfer({ + to = Blob.fromArray(AccountIdentifier.addHash(AccountIdentifier.fromPrincipal(Principal.fromActor(e_wallet), null))); + fee = {e8s = 200_000}; + memo = 1; + from_subaccount = null; + created_at_time = null; + amount = {e8s = 100 * 10 ** 8};}); + + let funding_result_6 = await dfx.transfer({ + to = Blob.fromArray(AccountIdentifier.addHash(AccountIdentifier.fromPrincipal(Principal.fromActor(f_wallet), null))); + fee = {e8s = 200_000}; + memo = 1; + from_subaccount = null; + created_at_time = null; + amount = {e8s = 100 * 10 ** 8};}); + + let newPrincipal = await g_canister_factory.create({ + owner = Principal.fromActor(this); + storage_space = null; + }); + + let canister : Types.Service = actor(Principal.toText(newPrincipal)); + + D.print("have canister"); + + D.print("calling stage"); + + let standardStage = await utils.buildStandardNFT("1", canister, Principal.fromActor(canister), 1024, false); + let standardStage2 = await utils.buildStandardNFT("2", canister, Principal.fromActor(canister), 1024, false); + let standardStage3 = await utils.buildStandardNFT("3", canister, Principal.fromActor(canister), 1024, false); + let standardStage4 = await utils.buildStandardNFT("4", canister, Principal.fromActor(canister), 1024, false); + let standardStage5 = await utils.buildStandardNFT("5", canister, Principal.fromActor(canister), 1024, false); + let standardStage6 = await utils.buildStandardNFT("6", canister, Principal.fromActor(canister), 1024, false); + let standardStage7 = await utils.buildStandardNFT("7", canister, Principal.fromActor(canister), 1024, false); + let standardStage8 = await utils.buildStandardNFT("8", canister, Principal.fromActor(canister), 1024, false); + + let registration_date = Time.now() + 100000000000; + let allocation_date = Time.now() + 900000000000; + let lock_until = allocation_date + 900000000000; + //create sales canister + let sale_canister = await Sales.SaleCanister({ + owner = Principal.fromActor(this); + allocation_expiration = 900000000000; + nft_gateway = ?Principal.fromActor(canister); + sale_open_date = ?(allocation_date); // in 15 minutes + registration_date = ?registration_date; + end_date = null; + required_lock_date = ?(lock_until); //15 minutes past allocation date + + }); + + let manager_add = await canister.collection_update_nft_origyn(#UpdateManagers([Principal.fromActor(sale_canister)])); + //D.print("manager add" # debug_show(manager_add)); + + + + + D.print("adding unminted"); + + let add_unminted_1 = await sale_canister.manage_nfts_sale_nft_origyn([ + #add({ + canister = Principal.fromActor(canister); + token_id = "1"; + }), + #add({ + canister = Principal.fromActor(canister); + token_id = "2"; + }), + #add({ + canister = Principal.fromActor(canister); + token_id = "3"; + }), + #add({ + canister = Principal.fromActor(canister); + token_id = "4"; + }), + #add({ + canister = Principal.fromActor(canister); + token_id = "5"; + }), + #add({ + canister = Principal.fromActor(canister); + token_id = "6"; + }), + #add({ + canister = Principal.fromActor(canister); + token_id = "5"; + }), + #add({ + canister = Principal.fromActor(canister); + token_id = "6"; + }), + #add({ + canister = Principal.fromActor(canister); + token_id = "7"; + }), + #add({ + canister = Principal.fromActor(canister); + token_id = "8"; + }), + ] + ); + + //we will allocate one specific to a + //we will allocate one group to a + + //we will 
have a register one and then buy one after + + + //we will allocate five to group b + + //we will have b regiser for 4 + + //b regiters for 2 + //b buy 2 + + //have c/d try to buy + + //create a defalut group with an allocation of 2 + + let defaultGroup = await sale_canister.manage_group_sale_nft_origyn([#update({ + namespace = ""; //default namespace + members = null; + pricing = ?[#cost_per{ + amount = 1000000000; + token = #ic({ + canister = dfx_ledger; + fee = 200000 : Nat; + symbol = "OGY"; + decimals = 8 : Nat; + standard = #Ledger; + }); + }]; + allowed_amount = ?2; + tier = 0; + additive = true; + } + )]); + + //create a specific group with allocation of 2 for b wallet + let aGroup = await sale_canister.manage_group_sale_nft_origyn([#update({ + namespace = "agroup"; //default namespace + members = ?[Principal.fromActor(a_wallet)]; + pricing = ?[#cost_per{ + amount = 1000000000; + token = #ic({ + canister = dfx_ledger; + fee = 200000; + symbol = "OGY"; + decimals = 8; + standard = #Ledger; + }) + }]; + allowed_amount = ?1; + additive = true; + tier = 1; + } + )]); + + //put b and c in b group + + //create a specific group with allocation of 2 for b wallet + let bGroup = await sale_canister.manage_group_sale_nft_origyn([#update({ + namespace = "bgroup"; //default namespace + members = ?[Principal.fromActor(b_wallet),Principal.fromActor(c_wallet)]; + pricing = ? [#cost_per{ + amount = 1000000000; + token = #ic({ + canister = dfx_ledger; + fee = 200000; + symbol = "OGY"; + decimals = 8; + standard = #Ledger; + }) + }]; + allowed_amount = ?2; + additive = true; + tier = 2 + } + )]); + + //set up reservations: + + // allocate "1" to a_wallet + let a_principal_request = await sale_canister.manage_reservation_sale_nft_origyn([ + #add({ + namespace = Principal.toText(Principal.fromActor(a_wallet)) # "individual"; + reservation_type = #Principal(Principal.fromActor(a_wallet)); + nfts : [Text] = ["1" : Text]; + exclusive = true; + } + ) + ]); + + // allocate "2","3" to a group a + + let a_group_request = await sale_canister.manage_reservation_sale_nft_origyn([ + #add({ + namespace = "agroupreservation"; + reservation_type = #Groups(["agroup"]); + nfts : [Text] = ["2" : Text, "3"]; + exclusive = true; + } + ) + ]); + + // allocate "4,5,6,7,8" to group b + + let b_group_request = await sale_canister.manage_reservation_sale_nft_origyn([ + #add({ + namespace = "bgroupreservation"; + reservation_type = #Groups(["bgroup"]); + nfts : [Text] = ["4", "5", "6", "7", "8"]; + exclusive = true; + } + ) + ]); + + D.print("finished group requets" # debug_show(b_group_request)); + + + + //leave d out but try to get one anyway...should fail + + let aRedeem_payment_2 = await a_wallet.send_ledger_payment(dfx_ledger, (30 * 10 ** 8) + 600000, Principal.fromActor(canister)); + + D.print("apayment"# debug_show(aRedeem_payment_2)); + + let a_wallet_try_escrow_general_valid = await a_wallet.try_escrow_general_staged(Principal.fromActor(canister), Principal.fromActor(canister), dfx_ledger, switch(aRedeem_payment_2){case(#ok(val)){?Nat64.toNat(val)};case(#err(err)){?0};}, 30 * 10 ** 8, ?dfx_token_spec, ?lock_until); + + D.print("about to try registration"# debug_show(a_wallet_try_escrow_general_valid)); + //register escrow for one NFT + + let a_wallet_try_register_for_one = await a_wallet.try_sale_registration(Principal.fromActor(sale_canister), { principal = Principal.fromActor(a_wallet); max_desired = 1; escrow_receipt = switch(a_wallet_try_escrow_general_valid){case(#ok(val)){ + ?{ + + buyer = val.receipt.buyer; + 
seller = val.receipt.seller; + token = dfx_token_spec; + token_id = ""; + amount = 10 * 10 ** 8; //10 icp for one + + }};case(#err(err)){throw(Error.reject("THROW ----------------- failed to get escrow for a payment in testRedeem for bad lock"))}}}); + + D.print("registered for one " # debug_show(a_wallet_try_register_for_one)); + //check that registration is updated + + let a_wallet_registration_after_one = await sale_canister.get_registration_sale_nft_origyn(Principal.fromActor(a_wallet)); + + //redeem escrow for two more of the NFTs + + D.print("about to send payment b"); + + //register b for 4 with additive + let bRedeem_payment_2 = await b_wallet.send_ledger_payment(dfx_ledger, (40 * 10 ** 8) + 800000, Principal.fromActor(canister)); + + D.print("about to escrow b" # debug_show(bRedeem_payment_2)); + + let b_wallet_try_escrow_general_valid = await b_wallet.try_escrow_general_staged(Principal.fromActor(canister), Principal.fromActor(canister), dfx_ledger, switch(bRedeem_payment_2){case(#ok(val)){?Nat64.toNat(val)};case(#err(err)){?0};}, 40 * 10 ** 8, ?dfx_token_spec, ?lock_until); + + D.print("about to register b" # debug_show(b_wallet_try_escrow_general_valid)); + let b_wallet_try_register_for_two = await b_wallet.try_sale_registration(Principal.fromActor(sale_canister), {principal = Principal.fromActor(b_wallet); max_desired = 2; escrow_receipt = switch(b_wallet_try_escrow_general_valid){case(#ok(val)){ + ?{ + + buyer = val.receipt.buyer; + seller = val.receipt.seller; + token = dfx_token_spec; + token_id = ""; + amount = 20 * 10 ** 8; //20 icp for two + }};case(#err(err)){throw(Error.reject("THROW ----------------- failed to get escrow for a payment in testRedeem for bad lock"))}}}); + + D.print("registered for two " # debug_show(b_wallet_try_register_for_two)); + + + let b_wallet_registration_after_four = await sale_canister.get_registration_sale_nft_origyn(Principal.fromActor(b_wallet)); + + //register d for 2 with non-additive but d should be allocated none due to reservations + + let dRedeem_payment_2 = await d_wallet.send_ledger_payment(dfx_ledger, (20 * 10 ** 8) + 400000, Principal.fromActor(canister)); + + let d_wallet_try_escrow_general_valid = await d_wallet.try_escrow_general_staged(Principal.fromActor(canister), Principal.fromActor(canister), dfx_ledger, switch(dRedeem_payment_2){case(#ok(val)){?Nat64.toNat(val)};case(#err(err)){?0};}, 20 * 10 ** 8, ?dfx_token_spec, ?lock_until); + + let d_wallet_try_register_for_two = await d_wallet.try_sale_registration(Principal.fromActor(sale_canister), {principal = Principal.fromActor(d_wallet); max_desired = 2; escrow_receipt = switch(d_wallet_try_escrow_general_valid){case(#ok(val)){ + ?{ + + buyer = val.receipt.buyer; + seller = val.receipt.seller; + token = dfx_token_spec; + token_id = ""; + amount = 20 * 10 ** 8; //20 icp for two + }};case(#err(err)){throw(Error.reject("THROW ----------------- failed to get escrow for a payment in testRedeem for bad lock"))}}}); + + + let d_wallet_registration_after_two = await sale_canister.get_registration_sale_nft_origyn(Principal.fromActor(d_wallet)); + + //advance time + + D.print("registered for two d " # debug_show(d_wallet_registration_after_two)); + + + let advancer = await sale_canister.__advance_time(allocation_date + 1); + + //assure allocation is made + //ways to assure this + //make a new registration? + //make a new allocation? + //redeem an allocation?
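+          // A minimal sketch of the allocate -> redeem -> claim flow verified below (comments only;
+          // `wallet`, `wallet_principal`, `sale_principal`, `n`, and `receipt` are placeholders rather
+          // than variables defined in this test):
+          //   let alloc  = await wallet.try_sale_nft_allocation(sale_principal, {
+          //     principal = wallet_principal; number_to_allocate = n; token = ?dfx_token_spec });
+          //   let redeem = await wallet.try_sale_nft_redeem(sale_principal, { escrow_receipt = receipt });
+          //   let claim  = await sale_canister.execute_claim_sale_nft_origyn(token_id);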
+ + let d_balance_before_allocation = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(d_wallet))); + + //should be empty + let d_allocate_empty = await d_wallet.try_sale_nft_allocation(Principal.fromActor(sale_canister),{ + principal = Principal.fromActor(d_wallet); + number_to_allocate = 1; + token = ?dfx_token_spec; + }); + + //should be 2 + let a_allocate_empty = await a_wallet.try_sale_nft_allocation(Principal.fromActor(sale_canister),{ + principal = Principal.fromActor(a_wallet); + number_to_allocate = 2; + token = ?dfx_token_spec; + }); + + let a_wallet_try_redeem_for_one = await a_wallet.try_sale_nft_redeem(Principal.fromActor(sale_canister), { escrow_receipt = switch(a_wallet_try_escrow_general_valid){case(#ok(val)){ + { + buyer = val.receipt.buyer; + seller = val.receipt.seller; + token = dfx_token_spec; + token_id = ""; + amount = 10 * 10 ** 8; //10 icp for one + + }};case(#err(err)){throw(Error.reject("THROW ----------------- failed to get escrow for a payment in testRedeem for bad lock"))}}}); + + D.print("redeem for one a " # debug_show(a_wallet_try_redeem_for_one)); + + + let b_allocate_empty = await b_wallet.try_sale_nft_allocation(Principal.fromActor(sale_canister),{ + principal = Principal.fromActor(b_wallet); + number_to_allocate = 2; + token = ?dfx_token_spec; + }); + + let b_wallet_try_redeem_for_one = await b_wallet.try_sale_nft_redeem(Principal.fromActor(sale_canister), { escrow_receipt = switch(b_wallet_try_escrow_general_valid){case(#ok(val)){ + { + buyer = val.receipt.buyer; + seller = val.receipt.seller; + token = dfx_token_spec; + token_id = ""; + amount = 20 * 10 ** 8; //20 icp for two + + }};case(#err(err)){throw(Error.reject("THROW ----------------- failed to get escrow for a payment in testRedeem for bad lock"))}}}); + + D.print("redeem for one b " # debug_show(b_wallet_try_redeem_for_one)); + + + + //should fail even though A qualifies for 3 because it is reserved for c + + let a_allocate_empty_after_two = await a_wallet.try_sale_nft_allocation(Principal.fromActor(sale_canister),{ + principal = Principal.fromActor(a_wallet); + number_to_allocate = 2; + token = ?dfx_token_spec; + }); + + let a_wallet_try_redeem_for_third = await a_wallet.try_sale_nft_redeem(Principal.fromActor(sale_canister), { escrow_receipt = switch(a_wallet_try_escrow_general_valid){case(#ok(val)){ + { + buyer = val.receipt.buyer; + seller = val.receipt.seller; + token = dfx_token_spec; + token_id = ""; + amount = 10 * 10 ** 8; //10 icp for one + + }};case(#err(err)){throw(Error.reject("THROW ----------------- failed to get escrow for a payment in testRedeem for bad lock"))}}}); + + + D.print("redeem for third a " # debug_show(a_wallet_try_redeem_for_third)); + + + let cRedeem_payment_2 = await c_wallet.send_ledger_payment(dfx_ledger, (20 * 10 ** 8) + 400000, Principal.fromActor(canister)); + + let c_wallet_try_escrow_general_valid = await c_wallet.try_escrow_general_staged(Principal.fromActor(canister), Principal.fromActor(canister), dfx_ledger, switch(cRedeem_payment_2){case(#ok(val)){?Nat64.toNat(val)};case(#err(err)){?0};}, 20 * 10 ** 8, ?dfx_token_spec, ?lock_until); + + + + let c_allocate_empty_after_two = await c_wallet.try_sale_nft_allocation(Principal.fromActor(sale_canister),{ + principal = Principal.fromActor(c_wallet); + number_to_allocate = 2; + token = ?dfx_token_spec; + }); + + let c_wallet_try_redeem_for_one = await c_wallet.try_sale_nft_redeem(Principal.fromActor(sale_canister), { escrow_receipt = 
switch(c_wallet_try_escrow_general_valid){case(#ok(val)){ + { + buyer = val.receipt.buyer; + seller = val.receipt.seller; + token = dfx_token_spec; + token_id = ""; + amount = 20 * 10 ** 8; //one icp for one + + }};case(#err(err)){throw(Error.reject("THROW ----------------- failed to get escrow for a payment in testRedeem for bad lock"))}}}); + + + D.print("redeem for two c " # debug_show(c_wallet_try_redeem_for_one)); + + + + //check that allocation is updated + + let a_wallet_registration_after_allocation = await sale_canister.get_registration_sale_nft_origyn(Principal.fromActor(a_wallet)); + let b_wallet_registration_after_allocation = await sale_canister.get_registration_sale_nft_origyn(Principal.fromActor(b_wallet)); + let c_wallet_registration_after_allocation = await sale_canister.get_registration_sale_nft_origyn(Principal.fromActor(c_wallet)); + let d_wallet_registration_after_allocation = await sale_canister.get_registration_sale_nft_origyn(Principal.fromActor(d_wallet)); + + + D.print("balance after allocation " # debug_show(a_wallet_registration_after_allocation,b_wallet_registration_after_allocation,c_wallet_registration_after_allocation,d_wallet_registration_after_allocation)); + + + //claim + switch( + a_wallet_registration_after_allocation, + b_wallet_registration_after_allocation, + c_wallet_registration_after_allocation, + d_wallet_registration_after_allocation){ + case(#ok(a),#ok(b),#ok(c),#ok(d)){ + for(thisitem in a.allocation.vals()){ + let claim_result = await sale_canister.execute_claim_sale_nft_origyn(thisitem.token_id); + }; + for(thisitem in b.allocation.vals()){ + let claim_result = await sale_canister.execute_claim_sale_nft_origyn(thisitem.token_id); + }; + for(thisitem in c.allocation.vals()){ + let claim_result = await sale_canister.execute_claim_sale_nft_origyn(thisitem.token_id); + }; + for(thisitem in d.allocation.vals()){ + let claim_result = await sale_canister.execute_claim_sale_nft_origyn(thisitem.token_id); + }; + }; + case(_,_,_,_) { + D.print("THROW ----------------- couldnt get the registratons after allocation" # debug_show(a_wallet_registration_after_allocation,b_wallet_registration_after_allocation,c_wallet_registration_after_allocation,d_wallet_registration_after_allocation)); + throw(Error.reject("THROW ----------------- couldnt get the registratons after allocation")); + }; + }; + + //check nft balance + + let a_wallet_balance_after_three = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(a_wallet))); + let b_wallet_balance_after_three = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(b_wallet))); + let c_wallet_balance_after_three = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(c_wallet))); + let d_wallet_balance_after_three = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(d_wallet))); + + + D.print("balance after three " # debug_show(a_wallet_balance_after_three,b_wallet_balance_after_three,c_wallet_balance_after_three,d_wallet_balance_after_three)); + + + //try to allocate nfts --- should be out of inventory + + let c_allocate_empty_2_end = await c_wallet.try_sale_nft_allocation(Principal.fromActor(sale_canister),{ + principal = Principal.fromActor(c_wallet); + number_to_allocate = 1; + token = ?dfx_token_spec; + }); + + + let a_allocate_empty_3_end = await a_wallet.try_sale_nft_allocation(Principal.fromActor(sale_canister),{ + principal = Principal.fromActor(a_wallet); + number_to_allocate = 1; + token = ?dfx_token_spec; + }); + + let d_allocate_empty_3_end = 
await d_wallet.try_sale_nft_allocation(Principal.fromActor(sale_canister),{ + principal = Principal.fromActor(d_wallet); + number_to_allocate = 1; + token = ?dfx_token_spec; + }); + + + + //D.print("running suite test registrations"); + + let suite = S.suite("test registration", [ + + S.test("can register one item", switch(d_balance_before_allocation){case(#ok(res)){ + if(res.nfts.size() == 0){ + "expected success" + } else { + "unexpected success" # debug_show(res) + }};case(#err(err)){ + "wrong error " # debug_show(err); + };}, M.equals(T.text("expected success"))), + + S.test("fail d is allocated inventory", switch(d_allocate_empty){case(#ok(res)){"unexpected success" # debug_show(res)};case(#err(err)){ + if(err.number == 5004){ //empty inventory because all are allocated to reservations + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), + + S.test("fail if a is allocated more than 1 inventory", switch(a_allocate_empty){case(#ok(res)){ + if(res.allocation_size == 1){ + "expected success" + } else { + "unexpected success" # debug_show(res) + }};case(#err(err)){ + "wrong error " # debug_show(err); + };}, M.equals(T.text("expected success"))), + S.test("b should be allocated two more", switch(a_allocate_empty){case(#ok(res)){ + if(res.allocation_size == 2){ + "expected success" + } else { + "unexpected success" # debug_show(res) + }};case(#err(err)){ + "wrong error " # debug_show(err); + };}, M.equals(T.text("expected success"))), + S.test("fail d is allocated inventory", switch(a_allocate_empty_after_two){case(#ok(res)){"unexpected success" # debug_show(res)};case(#err(err)){ + if(err.number == 5004){ //empty inventory because all are allocated to reservations + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), + S.test("fail a if allocating for a un allocated item", switch(a_wallet_try_redeem_for_third){case(#ok(res)){"unexpected success" # debug_show(res)};case(#err(err)){ + if(err.number == 5001){ //allocation doesnt exist + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), + S.test("c can register for one", switch(c_allocate_empty_after_two){case(#ok(res)){ + if(res.allocation_size == 1){ + "expected success" + } else { + "unexpected success" # debug_show(res) + }};case(#err(err)){ + "wrong error " # debug_show(err); + };}, M.equals(T.text("expected success"))), + S.test("c can register for one", switch(c_wallet_try_redeem_for_one){case(#ok(res)){ + if(res.nfts.size() == 1){ + "expected success" + } else { + "unexpected success" # debug_show(res) + }};case(#err(err)){ + "wrong error " # debug_show(err); + };}, M.equals(T.text("expected success"))), + + S.test("only 8 items allocated in balance", switch( + a_wallet_balance_after_three, + b_wallet_balance_after_three, + c_wallet_balance_after_three, + d_wallet_balance_after_three){ + case(#ok(a),#ok(b),#ok(c),#ok(d)){ + if(a.nfts.size() + b.nfts.size() + c.nfts.size() + d.nfts.size() == 8){ + "expected success" + } else { + "unexpected success" # debug_show((a,b,c,d)) + }}; + case(_,_,_,_) { + "wrong error " # debug_show((a_wallet_balance_after_three, b_wallet_balance_after_three, c_wallet_balance_after_three, d_wallet_balance_after_three)); + }; + }, M.equals(T.text("expected success"))), + + S.test("fail if is allocated inventory", switch(c_allocate_empty_2_end){case(#ok(res)){"unexpected success" # debug_show(res)};case(#err(err)){ + if(err.number == 5004){ 
//empty inventory because all are allocated to reservations + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), + + S.test("fail if is allocated inventory", switch(a_allocate_empty_3_end){case(#ok(res)){"unexpected success" # debug_show(res)};case(#err(err)){ + if(err.number == 5004){ //empty inventory because all are allocated to reservations + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), + + S.test("fail if is allocated inventory", switch(d_allocate_empty_3_end){case(#ok(res)){"unexpected success" # debug_show(res)};case(#err(err)){ + if(err.number == 5004){ //empty inventory because all are allocated to reservations + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), + + + + ]); + + S.run(suite); + + return #success; + }; + + public shared func testReservationNoReg() : async {#success; #fail : Text} { + D.print("running testRegistration"); + + let a_wallet = await TestWalletDef.test_wallet(); + let b_wallet = await TestWalletDef.test_wallet(); + let c_wallet = await TestWalletDef.test_wallet(); + let d_wallet = await TestWalletDef.test_wallet(); + let e_wallet = await TestWalletDef.test_wallet(); + let f_wallet = await TestWalletDef.test_wallet(); + + D.print("have wallets"); + + //fund wallets + + let dfx : DFXTypes.Service = actor(Principal.toText(dfx_ledger)); + let funding_result = await dfx.transfer({ + to = Blob.fromArray(AccountIdentifier.addHash(AccountIdentifier.fromPrincipal(Principal.fromActor(a_wallet), null))); + fee = {e8s = 200_000}; + memo = 1; + from_subaccount = null; + created_at_time = null; + amount = {e8s = 100 * 10 ** 8};}); + + let funding_result_2 = await dfx.transfer({ + to = Blob.fromArray(AccountIdentifier.addHash(AccountIdentifier.fromPrincipal(Principal.fromActor(b_wallet), null))); + fee = {e8s = 200_000}; + memo = 1; + from_subaccount = null; + created_at_time = null; + amount = {e8s = 100 * 10 ** 8};}); + + let funding_result_5 = await dfx.transfer({ + to = Blob.fromArray(AccountIdentifier.addHash(AccountIdentifier.fromPrincipal(Principal.fromActor(c_wallet), null))); + fee = {e8s = 200_000}; + memo = 1; + from_subaccount = null; + created_at_time = null; + amount = {e8s = 100 * 10 ** 8};}); + + let funding_result_3 = await dfx.transfer({ + to = Blob.fromArray(AccountIdentifier.addHash(AccountIdentifier.fromPrincipal(Principal.fromActor(d_wallet), null))); + fee = {e8s = 200_000}; + memo = 1; + from_subaccount = null; + created_at_time = null; + amount = {e8s = 100 * 10 ** 8};}); + + let funding_result_4 = await dfx.transfer({ + to = Blob.fromArray(AccountIdentifier.addHash(AccountIdentifier.fromPrincipal(Principal.fromActor(e_wallet), null))); + fee = {e8s = 200_000}; + memo = 1; + from_subaccount = null; + created_at_time = null; + amount = {e8s = 100 * 10 ** 8};}); + + let funding_result_6 = await dfx.transfer({ + to = Blob.fromArray(AccountIdentifier.addHash(AccountIdentifier.fromPrincipal(Principal.fromActor(f_wallet), null))); + fee = {e8s = 200_000}; + memo = 1; + from_subaccount = null; + created_at_time = null; + amount = {e8s = 100 * 10 ** 8};}); + + let newPrincipal = await g_canister_factory.create({ + owner = Principal.fromActor(this); + storage_space = null; + }); + + let canister : Types.Service = actor(Principal.toText(newPrincipal)); + + D.print("have canister"); + + D.print("calling stage"); + + let standardStage = await utils.buildStandardNFT("1", canister, 
Principal.fromActor(canister), 1024, false); + let standardStage2 = await utils.buildStandardNFT("2", canister, Principal.fromActor(canister), 1024, false); + let standardStage3 = await utils.buildStandardNFT("3", canister, Principal.fromActor(canister), 1024, false); + let standardStage4 = await utils.buildStandardNFT("4", canister, Principal.fromActor(canister), 1024, false); + let standardStage5 = await utils.buildStandardNFT("5", canister, Principal.fromActor(canister), 1024, false); + let standardStage6 = await utils.buildStandardNFT("6", canister, Principal.fromActor(canister), 1024, false); + let standardStage7 = await utils.buildStandardNFT("7", canister, Principal.fromActor(canister), 1024, false); + let standardStage8 = await utils.buildStandardNFT("8", canister, Principal.fromActor(canister), 1024, false); + + let registration_date = Time.now() + 100000000000; + let allocation_date = Time.now() + 900000000000; + let lock_until = allocation_date + 900000000000; + //create sales canister + let sale_canister = await Sales.SaleCanister({ + owner = Principal.fromActor(this); + allocation_expiration = 900000000000; + nft_gateway = ?Principal.fromActor(canister); + sale_open_date = ?(allocation_date); // in 15 minutes + registration_date = ?registration_date; + end_date = null; + required_lock_date = ?(lock_until); //15 minutes past allocation date + + }); + + let manager_add = await canister.collection_update_nft_origyn(#UpdateManagers([Principal.fromActor(sale_canister)])); + //D.print("manager add" # debug_show(manager_add)); + + + + + D.print("adding unminted"); + + let add_unminted_1 = await sale_canister.manage_nfts_sale_nft_origyn([ + #add({ + canister = Principal.fromActor(canister); + token_id = "1"; + }), + #add({ + canister = Principal.fromActor(canister); + token_id = "2"; + }), + #add({ + canister = Principal.fromActor(canister); + token_id = "3"; + }), + #add({ + canister = Principal.fromActor(canister); + token_id = "4"; + }), + #add({ + canister = Principal.fromActor(canister); + token_id = "5"; + }), + #add({ + canister = Principal.fromActor(canister); + token_id = "6"; + }), + #add({ + canister = Principal.fromActor(canister); + token_id = "5"; + }), + #add({ + canister = Principal.fromActor(canister); + token_id = "6"; + }), + #add({ + canister = Principal.fromActor(canister); + token_id = "7"; + }), + #add({ + canister = Principal.fromActor(canister); + token_id = "8"; + }), + ] + ); + + //we will allocate one specific to a + //we will allocate one group to a + + //we will have a register one and then buy one after + + + //we will allocate five to group b + + //we will have b regiser for 4 + + //b regiters for 2 + //b buy 2 + + //have c/d try to buy + + //create a defalut group with an allocation of 2 + + let defaultGroup = await sale_canister.manage_group_sale_nft_origyn([#update({ + namespace = ""; //default namespace + members = null; + pricing = ?[#cost_per{ + amount = 1000000000; + token = #ic({ + canister = dfx_ledger; + fee = 200000 : Nat; + symbol = "OGY"; + decimals = 8 : Nat; + standard = #Ledger; + }); + }]; + allowed_amount = ?2; + tier = 0; + additive = true; + } + )]); + + //create a specific group with allocation of 2 for b wallet + let aGroup = await sale_canister.manage_group_sale_nft_origyn([#update({ + namespace = "agroup"; //default namespace + members = ?[Principal.fromActor(a_wallet)]; + pricing = ?[#cost_per{ + amount = 1000000000; + token = #ic({ + canister = dfx_ledger; + fee = 200000; + symbol = "OGY"; + decimals = 8; + standard = #Ledger; 
+ }) + }]; + allowed_amount = ?1; + additive = true; + tier = 1; + } + )]); + + //put b and c in b group + + //create a specific group with allocation of 2 for b wallet + let bGroup = await sale_canister.manage_group_sale_nft_origyn([#update({ + namespace = "bgroup"; //default namespace + members = ?[Principal.fromActor(b_wallet),Principal.fromActor(c_wallet)]; + pricing = ? [#cost_per{ + amount = 1000000000; + token = #ic({ + canister = dfx_ledger; + fee = 200000; + symbol = "OGY"; + decimals = 8; + standard = #Ledger; + }) + }]; + allowed_amount = ?2; + additive = true; + tier = 2 + } + )]); + + //set up reservations: + + // allocate "1" to a_wallet + let a_principal_request = await sale_canister.manage_reservation_sale_nft_origyn([ + #add({ + namespace = Principal.toText(Principal.fromActor(a_wallet)) # "individual"; + reservation_type = #Principal(Principal.fromActor(a_wallet)); + nfts : [Text] = ["1" : Text]; + exclusive = true; + } + ) + ]); + + // allocate "2","3" to a group a + + let a_group_request = await sale_canister.manage_reservation_sale_nft_origyn([ + #add({ + namespace = "agroupreservation"; + reservation_type = #Groups(["agroup"]); + nfts : [Text] = ["2" : Text, "3"]; + exclusive = true; + } + ) + ]); + + // allocate "4,5,6,7,8" to group b + + let b_group_request = await sale_canister.manage_reservation_sale_nft_origyn([ + #add({ + namespace = "bgroupreservation"; + reservation_type = #Groups(["bgroup"]); + nfts : [Text] = ["4", "5", "6", "7", "8"]; + exclusive = true; + } + ) + ]); + + D.print("finished group requets" # debug_show(b_group_request)); + + + + //leave d out but try to get one anyway...should fail + + + let advancer = await sale_canister.__advance_time(allocation_date + 1); + + let aRedeem_payment_2 = await a_wallet.send_ledger_payment(dfx_ledger, (30 * 10 ** 8) + 600000, Principal.fromActor(canister)); + + D.print("apayment"# debug_show(aRedeem_payment_2)); + + let a_wallet_try_escrow_general_valid = await a_wallet.try_escrow_general_staged(Principal.fromActor(canister), Principal.fromActor(canister), dfx_ledger, switch(aRedeem_payment_2){case(#ok(val)){?Nat64.toNat(val)};case(#err(err)){?0};}, 30 * 10 ** 8, ?dfx_token_spec, ?lock_until); + + D.print("about to try registration"# debug_show(a_wallet_try_escrow_general_valid)); + //register escrow for one NFT + + let a_allocate_one = await a_wallet.try_sale_nft_allocation(Principal.fromActor(sale_canister),{ + principal = Principal.fromActor(a_wallet); + number_to_allocate = 1; + token = ?dfx_token_spec; + }); + + let a_wallet_try_redeem_for_one = await a_wallet.try_sale_nft_redeem(Principal.fromActor(sale_canister), { escrow_receipt = switch(a_wallet_try_escrow_general_valid){case(#ok(val)){ + { + buyer = val.receipt.buyer; + seller = val.receipt.seller; + token = dfx_token_spec; + token_id = ""; + amount = 10 * 10 ** 8; //one icp for one + + }};case(#err(err)){throw(Error.reject("THROW ----------------- failed to get escrow for a payment in testRedeem for bad lock"))}}}); + + + //redeem escrow for the two more of the NFTs + + D.print("about to payment b"); + + //register b for 4 with additive + let bRedeem_payment_2 = await b_wallet.send_ledger_payment(dfx_ledger, (40 * 10 ** 8) + 800000, Principal.fromActor(canister)); + + D.print("about to escrow b" # debug_show(bRedeem_payment_2)); + + let b_wallet_try_escrow_general_valid = await b_wallet.try_escrow_general_staged(Principal.fromActor(canister), Principal.fromActor(canister), dfx_ledger, 
switch(bRedeem_payment_2){case(#ok(val)){?Nat64.toNat(val)};case(#err(err)){?0};}, 40 * 10 ** 8, ?dfx_token_spec, ?lock_until); + + D.print("about to register b" # debug_show(b_wallet_try_escrow_general_valid)); + + + let b_allocate_two = await b_wallet.try_sale_nft_allocation(Principal.fromActor(sale_canister),{ + principal = Principal.fromActor(b_wallet); + number_to_allocate = 2; + token = ?dfx_token_spec; + }); + + let b_wallet_try_redeem_for_two = await b_wallet.try_sale_nft_redeem(Principal.fromActor(sale_canister), { escrow_receipt = switch(b_wallet_try_escrow_general_valid){case(#ok(val)){ + { + buyer = val.receipt.buyer; + seller = val.receipt.seller; + token = dfx_token_spec; + token_id = ""; + amount = 20 * 10 ** 8; //20 icp for two + + }};case(#err(err)){throw(Error.reject("THROW ----------------- failed to get escrow for a payment in testRedeem for bad lock"))}}}); + + + + let b_wallet_registration_after_four = await sale_canister.get_registration_sale_nft_origyn(Principal.fromActor(b_wallet)); + + //register d for 2 with non-additive but d should be allocated none due to reservations + + let dRedeem_payment_2 = await d_wallet.send_ledger_payment(dfx_ledger, (20 * 10 ** 8) + 400000, Principal.fromActor(canister)); + + let d_wallet_try_escrow_general_valid = await d_wallet.try_escrow_general_staged(Principal.fromActor(canister), Principal.fromActor(canister), dfx_ledger, switch(dRedeem_payment_2){case(#ok(val)){?Nat64.toNat(val)};case(#err(err)){?0};}, 20 * 10 ** 8, ?dfx_token_spec, ?lock_until); + + + let d_allocate_two = await d_wallet.try_sale_nft_allocation(Principal.fromActor(sale_canister),{ + principal = Principal.fromActor(d_wallet); + number_to_allocate = 2; + token = ?dfx_token_spec; + }); + + let d_wallet_try_redeem_for_two = await d_wallet.try_sale_nft_redeem(Principal.fromActor(sale_canister), { escrow_receipt = switch(d_wallet_try_escrow_general_valid){case(#ok(val)){ + { + buyer = val.receipt.buyer; + seller = val.receipt.seller; + token = dfx_token_spec; + token_id = ""; + amount = 20 * 10 ** 8; //20 icp for two + + }};case(#err(err)){throw(Error.reject("THROW ----------------- failed to get escrow for a payment in testRedeem for bad lock"))}}}); + + + //advance time + + D.print("registered for two d " # debug_show(d_wallet_try_redeem_for_two)); + + + + + //assure allocation is made + //ways to assure this + //make a new registration? + //make a new allocation? + //redeem an allocation?
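+          // A short sketch of what the checks below rely on (comments only; `wallet` and `receipt`
+          // are placeholders): because no try_sale_registration call is made in this test, each
+          // wallet goes straight to allocation and redemption once time is past sale_open_date:
+          //   let alloc  = await wallet.try_sale_nft_allocation(Principal.fromActor(sale_canister), { ... });
+          //   let redeem = await wallet.try_sale_nft_redeem(Principal.fromActor(sale_canister), { escrow_receipt = receipt });
+          //   let claim  = await sale_canister.execute_claim_sale_nft_origyn(token_id);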
+ + let d_balance_before_allocation = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(d_wallet))); + + //should be empty + let d_allocate_empty = await d_wallet.try_sale_nft_allocation(Principal.fromActor(sale_canister),{ + principal = Principal.fromActor(d_wallet); + number_to_allocate = 1; + token = ?dfx_token_spec; + }); + + //should be 2 + let a_allocate_empty = await a_wallet.try_sale_nft_allocation(Principal.fromActor(sale_canister),{ + principal = Principal.fromActor(a_wallet); + number_to_allocate = 2; + token = ?dfx_token_spec; + }); + + let a_wallet_try_redeem_for_one_more = await a_wallet.try_sale_nft_redeem(Principal.fromActor(sale_canister), { escrow_receipt = switch(a_wallet_try_escrow_general_valid){case(#ok(val)){ + { + buyer = val.receipt.buyer; + seller = val.receipt.seller; + token = dfx_token_spec; + token_id = ""; + amount = 10 * 10 ** 8; //10 icp for one + + }};case(#err(err)){throw(Error.reject("THROW ----------------- failed to get escrow for a payment in testRedeem for bad lock"))}}}); + + D.print("redeem for one a " # debug_show(a_wallet_try_redeem_for_one_more)); + + + let b_allocate_empty = await b_wallet.try_sale_nft_allocation(Principal.fromActor(sale_canister),{ + principal = Principal.fromActor(b_wallet); + number_to_allocate = 2; + token = ?dfx_token_spec; + }); + + let b_wallet_try_redeem_for_one = await b_wallet.try_sale_nft_redeem(Principal.fromActor(sale_canister), { escrow_receipt = switch(b_wallet_try_escrow_general_valid){case(#ok(val)){ + { + buyer = val.receipt.buyer; + seller = val.receipt.seller; + token = dfx_token_spec; + token_id = ""; + amount = 20 * 10 ** 8; //20 icp for two + + }};case(#err(err)){throw(Error.reject("THROW ----------------- failed to get escrow for a payment in testRedeem for bad lock"))}}}); + + D.print("redeem for one b " # debug_show(b_wallet_try_redeem_for_one)); + + + + //should fail even though A qualifies for 3 because it is reserved for c + + let a_allocate_empty_after_two = await a_wallet.try_sale_nft_allocation(Principal.fromActor(sale_canister),{ + principal = Principal.fromActor(a_wallet); + number_to_allocate = 2; + token = ?dfx_token_spec; + }); + + let a_wallet_try_redeem_for_third = await a_wallet.try_sale_nft_redeem(Principal.fromActor(sale_canister), { escrow_receipt = switch(a_wallet_try_escrow_general_valid){case(#ok(val)){ + { + buyer = val.receipt.buyer; + seller = val.receipt.seller; + token = dfx_token_spec; + token_id = ""; + amount = 10 * 10 ** 8; //10 icp for one + + }};case(#err(err)){throw(Error.reject("THROW ----------------- failed to get escrow for a payment in testRedeem for bad lock"))}}}); + + + D.print("redeem for third a " # debug_show(a_wallet_try_redeem_for_third)); + + + let cRedeem_payment_2 = await c_wallet.send_ledger_payment(dfx_ledger, (20 * 10 ** 8) + 400000, Principal.fromActor(canister)); + + let c_wallet_try_escrow_general_valid = await c_wallet.try_escrow_general_staged(Principal.fromActor(canister), Principal.fromActor(canister), dfx_ledger, switch(cRedeem_payment_2){case(#ok(val)){?Nat64.toNat(val)};case(#err(err)){?0};}, 20 * 10 ** 8, ?dfx_token_spec, ?lock_until); + + + + let c_allocate_empty_after_two = await c_wallet.try_sale_nft_allocation(Principal.fromActor(sale_canister),{ + principal = Principal.fromActor(c_wallet); + number_to_allocate = 2; + token = ?dfx_token_spec; + }); + + let c_wallet_try_redeem_for_one = await c_wallet.try_sale_nft_redeem(Principal.fromActor(sale_canister), { escrow_receipt = 
switch(c_wallet_try_escrow_general_valid){case(#ok(val)){ + { + buyer = val.receipt.buyer; + seller = val.receipt.seller; + token = dfx_token_spec; + token_id = ""; + amount = 20 * 10 ** 8; //one icp for one + + }};case(#err(err)){throw(Error.reject("THROW ----------------- failed to get escrow for a payment in testRedeem for bad lock"))}}}); + + + D.print("redeem for two c " # debug_show(c_wallet_try_redeem_for_one)); + + + + //check that allocation is updated + + let a_wallet_registration_after_allocation = await sale_canister.get_registration_sale_nft_origyn(Principal.fromActor(a_wallet)); + let b_wallet_registration_after_allocation = await sale_canister.get_registration_sale_nft_origyn(Principal.fromActor(b_wallet)); + let c_wallet_registration_after_allocation = await sale_canister.get_registration_sale_nft_origyn(Principal.fromActor(c_wallet)); + let d_wallet_registration_after_allocation = await sale_canister.get_registration_sale_nft_origyn(Principal.fromActor(d_wallet)); + + + D.print("balance after allocation " # debug_show(a_wallet_registration_after_allocation,b_wallet_registration_after_allocation,c_wallet_registration_after_allocation,d_wallet_registration_after_allocation)); + + + //claim + switch( + a_wallet_registration_after_allocation, + b_wallet_registration_after_allocation, + c_wallet_registration_after_allocation, + d_wallet_registration_after_allocation){ + case(#ok(a),#ok(b),#ok(c),#ok(d)){ + for(thisitem in a.allocation.vals()){ + let claim_result = await sale_canister.execute_claim_sale_nft_origyn(thisitem.token_id); + }; + for(thisitem in b.allocation.vals()){ + let claim_result = await sale_canister.execute_claim_sale_nft_origyn(thisitem.token_id); + }; + for(thisitem in c.allocation.vals()){ + let claim_result = await sale_canister.execute_claim_sale_nft_origyn(thisitem.token_id); + }; + for(thisitem in d.allocation.vals()){ + let claim_result = await sale_canister.execute_claim_sale_nft_origyn(thisitem.token_id); + }; + }; + case(_,_,_,_) { + D.print("THROW ----------------- couldnt get the registratons after allocation" # debug_show(a_wallet_registration_after_allocation,b_wallet_registration_after_allocation,c_wallet_registration_after_allocation,d_wallet_registration_after_allocation)); + throw(Error.reject("THROW ----------------- couldnt get the registratons after allocation")); + }; + }; + + //check nft balance + + let a_wallet_balance_after_three = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(a_wallet))); + let b_wallet_balance_after_three = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(b_wallet))); + let c_wallet_balance_after_three = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(c_wallet))); + let d_wallet_balance_after_three = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(d_wallet))); + + + D.print("balance after three " # debug_show(a_wallet_balance_after_three,b_wallet_balance_after_three,c_wallet_balance_after_three,d_wallet_balance_after_three)); + + + //try to allocate nfts --- should be out of inventory + + let c_allocate_empty_2_end = await c_wallet.try_sale_nft_allocation(Principal.fromActor(sale_canister),{ + principal = Principal.fromActor(c_wallet); + number_to_allocate = 1; + token = ?dfx_token_spec; + }); + + + let a_allocate_empty_3_end = await a_wallet.try_sale_nft_allocation(Principal.fromActor(sale_canister),{ + principal = Principal.fromActor(a_wallet); + number_to_allocate = 1; + token = ?dfx_token_spec; + }); + + let d_allocate_empty_3_end = 
await d_wallet.try_sale_nft_allocation(Principal.fromActor(sale_canister),{ + principal = Principal.fromActor(d_wallet); + number_to_allocate = 1; + token = ?dfx_token_spec; + }); + + + + //D.print("running suite test registrations"); + + let suite = S.suite("test registration", [ + + S.test("can register one item", switch(d_balance_before_allocation){case(#ok(res)){ + if(res.nfts.size() == 0){ + "expected success" + } else { + "unexpected success" # debug_show(res) + }};case(#err(err)){ + "wrong error " # debug_show(err); + };}, M.equals(T.text("expected success"))), + + S.test("fail d is allocated inventory", switch(d_allocate_empty){case(#ok(res)){"unexpected success" # debug_show(res)};case(#err(err)){ + if(err.number == 5004){ //empty inventory because all are allocated to reservations + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), + + S.test("fail if a is allocated more than 1 inventory", switch(a_allocate_empty){case(#ok(res)){ + if(res.allocation_size == 1){ + "expected success" + } else { + "unexpected success" # debug_show(res) + }};case(#err(err)){ + "wrong error " # debug_show(err); + };}, M.equals(T.text("expected success"))), + S.test("b should be allocated two more", switch(a_allocate_empty){case(#ok(res)){ + if(res.allocation_size == 2){ + "expected success" + } else { + "unexpected success" # debug_show(res) + }};case(#err(err)){ + "wrong error " # debug_show(err); + };}, M.equals(T.text("expected success"))), + S.test("fail d is allocated inventory", switch(a_allocate_empty_after_two){case(#ok(res)){"unexpected success" # debug_show(res)};case(#err(err)){ + if(err.number == 5004){ //empty inventory because all are allocated to reservations + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), + S.test("fail a if allocating for a un allocated item", switch(a_wallet_try_redeem_for_third){case(#ok(res)){"unexpected success" # debug_show(res)};case(#err(err)){ + if(err.number == 5001){ //allocation doesnt exist + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), + S.test("c can register for one", switch(c_allocate_empty_after_two){case(#ok(res)){ + if(res.allocation_size == 1){ + "expected success" + } else { + "unexpected success" # debug_show(res) + }};case(#err(err)){ + "wrong error " # debug_show(err); + };}, M.equals(T.text("expected success"))), + S.test("c can register for one", switch(c_wallet_try_redeem_for_one){case(#ok(res)){ + if(res.nfts.size() == 1){ + "expected success" + } else { + "unexpected success" # debug_show(res) + }};case(#err(err)){ + "wrong error " # debug_show(err); + };}, M.equals(T.text("expected success"))), + + S.test("only 8 items allocated in balance", switch( + a_wallet_balance_after_three, + b_wallet_balance_after_three, + c_wallet_balance_after_three, + d_wallet_balance_after_three){ + case(#ok(a),#ok(b),#ok(c),#ok(d)){ + if(a.nfts.size() + b.nfts.size() + c.nfts.size() + d.nfts.size() == 8){ + "expected success" + } else { + "unexpected success" # debug_show((a,b,c,d)) + }}; + case(_,_,_,_) { + "wrong error " # debug_show((a_wallet_balance_after_three, b_wallet_balance_after_three, c_wallet_balance_after_three, d_wallet_balance_after_three)); + }; + }, M.equals(T.text("expected success"))), + + S.test("fail if is allocated inventory", switch(c_allocate_empty_2_end){case(#ok(res)){"unexpected success" # debug_show(res)};case(#err(err)){ + if(err.number == 5004){ 
//empty inventory because all are allocated to reservations + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), + + S.test("fail if is allocated inventory", switch(a_allocate_empty_3_end){case(#ok(res)){"unexpected success" # debug_show(res)};case(#err(err)){ + if(err.number == 5004){ //empty inventory because all are allocated to reservations + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), + + S.test("fail if is allocated inventory", switch(d_allocate_empty_3_end){case(#ok(res)){"unexpected success" # debug_show(res)};case(#err(err)){ + if(err.number == 5004){ //empty inventory because all are allocated to reservations + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), + + + + ]); + + S.run(suite); + + return #success; + }; + + public shared func testRegistration() : async {#success; #fail : Text} { + D.print("running testRegistration"); + + let a_wallet = await TestWalletDef.test_wallet(); + let b_wallet = await TestWalletDef.test_wallet(); + let c_wallet = await TestWalletDef.test_wallet(); + let d_wallet = await TestWalletDef.test_wallet(); + let e_wallet = await TestWalletDef.test_wallet(); + let f_wallet = await TestWalletDef.test_wallet(); + + //D.print("have wallets"); + + //fund wallets + + let dfx : DFXTypes.Service = actor(Principal.toText(dfx_ledger)); + let funding_result = await dfx.transfer({ + to = Blob.fromArray(AccountIdentifier.addHash(AccountIdentifier.fromPrincipal(Principal.fromActor(a_wallet), null))); + fee = {e8s = 200_000}; + memo = 1; + from_subaccount = null; + created_at_time = null; + amount = {e8s = 100 * 10 ** 8};}); + + let funding_result_2 = await dfx.transfer({ + to = Blob.fromArray(AccountIdentifier.addHash(AccountIdentifier.fromPrincipal(Principal.fromActor(b_wallet), null))); + fee = {e8s = 200_000}; + memo = 1; + from_subaccount = null; + created_at_time = null; + amount = {e8s = 100 * 10 ** 8};}); + + let funding_result_5 = await dfx.transfer({ + to = Blob.fromArray(AccountIdentifier.addHash(AccountIdentifier.fromPrincipal(Principal.fromActor(c_wallet), null))); + fee = {e8s = 200_000}; + memo = 1; + from_subaccount = null; + created_at_time = null; + amount = {e8s = 100 * 10 ** 8};}); + + let funding_result_3 = await dfx.transfer({ + to = Blob.fromArray(AccountIdentifier.addHash(AccountIdentifier.fromPrincipal(Principal.fromActor(d_wallet), null))); + fee = {e8s = 200_000}; + memo = 1; + from_subaccount = null; + created_at_time = null; + amount = {e8s = 100 * 10 ** 8};}); + + let funding_result_4 = await dfx.transfer({ + to = Blob.fromArray(AccountIdentifier.addHash(AccountIdentifier.fromPrincipal(Principal.fromActor(e_wallet), null))); + fee = {e8s = 200_000}; + memo = 1; + from_subaccount = null; + created_at_time = null; + amount = {e8s = 100 * 10 ** 8};}); + + let funding_result_6 = await dfx.transfer({ + to = Blob.fromArray(AccountIdentifier.addHash(AccountIdentifier.fromPrincipal(Principal.fromActor(f_wallet), null))); + fee = {e8s = 200_000}; + memo = 1; + from_subaccount = null; + created_at_time = null; + amount = {e8s = 100 * 10 ** 8};}); + + let newPrincipal = await g_canister_factory.create({ + owner = Principal.fromActor(this); + storage_space = null; + }); + + let canister : Types.Service = actor(Principal.toText(newPrincipal)); + + + + //D.print("have canister"); + + //D.print("calling stage"); + + let standardStage = await utils.buildStandardNFT("1", 
canister, Principal.fromActor(canister), 1024, false); + let standardStage2 = await utils.buildStandardNFT("2", canister, Principal.fromActor(canister), 1024, false); + let standardStage3 = await utils.buildStandardNFT("3", canister, Principal.fromActor(canister), 1024, false); + let standardStage4 = await utils.buildStandardNFT("4", canister, Principal.fromActor(canister), 1024, false); + let standardStage5 = await utils.buildStandardNFT("5", canister, Principal.fromActor(canister), 1024, false); + let standardStage6 = await utils.buildStandardNFT("6", canister, Principal.fromActor(canister), 1024, false); + let standardStage7 = await utils.buildStandardNFT("7", canister, Principal.fromActor(canister), 1024, false); + let standardStage8 = await utils.buildStandardNFT("8", canister, Principal.fromActor(canister), 1024, false); + + let registration_date = Time.now() + 100000000000; + let allocation_date = Time.now() + 900000000000; + let lock_until = allocation_date + 900000000000; + //create sales canister + let sale_canister = await Sales.SaleCanister({ + owner = Principal.fromActor(this); + allocation_expiration = 900000000000; + nft_gateway = ?Principal.fromActor(canister); + sale_open_date = ?(allocation_date); // in 15 minutes + registration_date = ?registration_date; + end_date = null; + required_lock_date = ?(lock_until); //15 minutes past allocation date + + }); + + let manager_add = await canister.collection_update_nft_origyn(#UpdateManagers([Principal.fromActor(sale_canister)])); + //D.print("manager add" # debug_show(manager_add)); + + + //D.print("adding unminted"); + + let add_unminted_1 = await sale_canister.manage_nfts_sale_nft_origyn([ + #add({ + canister = Principal.fromActor(canister); + token_id = "1"; + }), + #add({ + canister = Principal.fromActor(canister); + token_id = "2"; + }), + #add({ + canister = Principal.fromActor(canister); + token_id = "3"; + }), + #add({ + canister = Principal.fromActor(canister); + token_id = "4"; + }), + #add({ + canister = Principal.fromActor(canister); + token_id = "5"; + }), + #add({ + canister = Principal.fromActor(canister); + token_id = "6"; + }), + ] + ); + + //create a defalut group with an allocation of 2 + + let defaultGroup = await sale_canister.manage_group_sale_nft_origyn([#update({ + namespace = ""; //default namespace + members = null; + pricing = ?[#cost_per{ + amount = 1000000000; + token = #ic({ + canister = dfx_ledger; + fee = 200000 : Nat; + symbol = "OGY"; + decimals = 8 : Nat; + standard = #Ledger; + }); + }]; + allowed_amount = ?5; + tier = 0; + additive = true; + } + )]); + + //create a specific group with allocation of 2 for b wallet + let bGroup = await sale_canister.manage_group_sale_nft_origyn([#update({ + namespace = "bgroup"; //default namespace + members = ?[Principal.fromActor(b_wallet)]; + pricing = ?[#cost_per{ + amount = 1000000000; + token = #ic({ + canister = dfx_ledger; + fee = 200000; + symbol = "OGY"; + decimals = 8; + standard = #Ledger; + }) + }]; + allowed_amount = ?2; + additive = true; + tier = 1; + } + )]); + + //create a specific group with allocation of 2 for b wallet + let dGroup = await sale_canister.manage_group_sale_nft_origyn([#update({ + namespace = ""; //default namespace + members = ?[Principal.fromActor(d_wallet)]; + pricing = ? 
[#cost_per{ + amount = 1000000000; + token = #ic({ + canister = dfx_ledger; + fee = 200000; + symbol = "OGY"; + decimals = 8; + standard = #Ledger; + }) + }]; + allowed_amount = ?2; + additive = false; + tier = 2 + } + )]); + + + //have a redeem thier allocation + + let fRedeem_payment = await f_wallet.send_ledger_payment(dfx_ledger, (20 * 10 ** 8) + 400000, Principal.fromActor(canister)); + + let f_wallet_try_escrow_general_no_lock = await f_wallet.try_escrow_general_staged(Principal.fromActor(canister), Principal.fromActor(canister), dfx_ledger, switch(fRedeem_payment){case(#ok(val)){?Nat64.toNat(val)};case(#err(err)){?0};}, 20 * 10 ** 8, ?dfx_token_spec, ?(lock_until - 1)); + + + //register before open + let f_wallet_try_registration_before_open = await a_wallet.try_sale_registration(Principal.fromActor(sale_canister), {principal = Principal.fromActor(a_wallet); max_desired =1; escrow_receipt = + ?{ + buyer = #principal(Principal.fromActor(a_wallet)); + seller = #principal(Principal.fromActor(canister)); + token = dfx_token_spec; + token_id = ""; + amount = 10 * 10 ** 8; //one icp for one token + }}); + + + + + //register with fake escrow + let a_wallet_try_registration_no_escrow = await a_wallet.try_sale_registration(Principal.fromActor(sale_canister), { principal = Principal.fromActor(a_wallet);max_desired =1; escrow_receipt = + ?{ + buyer = #principal(Principal.fromActor(a_wallet)); + seller = #principal(Principal.fromActor(canister)); + token = dfx_token_spec; + token_id = ""; + amount = 10 * 10 ** 8; //one icp for one token + }}); + + //send payment to nft canister + + let aRedeem_payment = await a_wallet.send_ledger_payment(dfx_ledger, (20 * 10 ** 8) + 400000, Principal.fromActor(canister)); + + let a_wallet_try_escrow_general_no_lock = await a_wallet.try_escrow_general_staged(Principal.fromActor(canister), Principal.fromActor(canister), dfx_ledger, switch(aRedeem_payment){case(#ok(val)){?Nat64.toNat(val)};case(#err(err)){?0};}, 20 * 10 ** 8, ?dfx_token_spec, ?(lock_until - 1)); + + //register escrow with no lock past mint date + + let a_wallet_try_registration_bad_lock = await a_wallet.try_sale_registration(Principal.fromActor(sale_canister), { principal = Principal.fromActor(a_wallet);max_desired = 1; escrow_receipt = switch(a_wallet_try_escrow_general_no_lock){case(#ok(val)){?val.receipt};case(#err(err)){throw(Error.reject("THROW ----------------- failed to get escrow for a payment in testRedeem for bad lock"))}}}); + + //create a new payment with lock + + let aRedeem_payment_2 = await a_wallet.send_ledger_payment(dfx_ledger, (20 * 10 ** 8) + 400001, Principal.fromActor(canister)); + + D.print("escrow general valid a" # debug_show(aRedeem_payment_2) ); + + let a_wallet_try_escrow_general_valid = await a_wallet.try_escrow_general_staged(Principal.fromActor(canister), Principal.fromActor(canister), dfx_ledger, switch(aRedeem_payment_2){case(#ok(val)){?Nat64.toNat(val)};case(#err(err)){?0};}, (20 * 10 ** 8) + 1, ?dfx_token_spec, ?lock_until); + + D.print("escrow general valid a" # debug_show(a_wallet_try_escrow_general_valid) ); + //register escrow with not enough payment for at least 1 NFT + + let a_wallet_try_registration_low_amount = await a_wallet.try_sale_registration(Principal.fromActor(sale_canister), {principal = Principal.fromActor(a_wallet);max_desired=1; escrow_receipt = switch(a_wallet_try_escrow_general_valid){case(#ok(val)){ + ?{ + buyer = val.receipt.buyer; + seller = val.receipt.seller; + token = dfx_token_spec; + token_id = ""; + amount = 9 * 10 ** 8; //one token 
short + + }};case(#err(err)){ + D.print("THROW ----------------- failed a register for low" # debug_show(a_wallet_try_escrow_general_valid) ); + throw(Error.reject("THROW ----------------- failed to get escrow for a payment in testRedeem for bad lock"))}}}); + + + //register escrow for one NFT + + let a_wallet_try_redeem_for_one = await a_wallet.try_sale_registration(Principal.fromActor(sale_canister), { principal = Principal.fromActor(a_wallet);max_desired = 1; escrow_receipt = switch(a_wallet_try_escrow_general_valid){case(#ok(val)){ + ?{ + buyer = val.receipt.buyer; + seller = val.receipt.seller; + token = dfx_token_spec; + token_id = ""; + amount = 10 * 10 ** 8; //one icp for one + + }};case(#err(err)){ + D.print("THROW ----------------- failed a register for 1" # debug_show(a_wallet_try_escrow_general_valid) ); + throw(Error.reject("THROW ----------------- failed to get escrow for a payment in testRedeem for bad lock"))}}}); + + + //check that registration is updated + + let a_wallet_registration_after_one = await sale_canister.get_registration_sale_nft_origyn(Principal.fromActor(a_wallet)); + + D.print("a_wallet_registration_after_one " # debug_show(a_wallet_registration_after_one) ); + + //redeem escrow for the two more of the NFTs + + let a_wallet_try_register_for_two = await a_wallet.try_sale_registration(Principal.fromActor(sale_canister), { principal = Principal.fromActor(a_wallet);max_desired = 2; escrow_receipt = switch(a_wallet_try_escrow_general_valid){case(#ok(val)){ + ?{ + buyer = val.receipt.buyer; + seller = val.receipt.seller; + token = dfx_token_spec; + token_id = ""; + amount = 20 * 10 ** 8; //20 icp for two + }};case(#err(err)){ + D.print("THROW ----------------- failed a register for 2" # debug_show(a_wallet_registration_after_one) ); + throw(Error.reject("THROW ----------------- failed to get escrow for a payment in testRedeem for bad lock"))}}}); + + let a_wallet_registration_after_two = await sale_canister.get_registration_sale_nft_origyn(Principal.fromActor(a_wallet)); + + + //register b for 4 with additive + let bRedeem_payment_2 = await b_wallet.send_ledger_payment(dfx_ledger, (40 * 10 ** 8) + 800000, Principal.fromActor(canister)); + + let b_wallet_try_escrow_general_valid = await b_wallet.try_escrow_general_staged(Principal.fromActor(canister), Principal.fromActor(canister), dfx_ledger, switch(bRedeem_payment_2){case(#ok(val)){?Nat64.toNat(val)};case(#err(err)){?0};}, 40 * 10 ** 8, ?dfx_token_spec, ?lock_until); + + let b_wallet_try_register_for_four = await b_wallet.try_sale_registration(Principal.fromActor(sale_canister), { principal = Principal.fromActor(b_wallet);max_desired = 2; escrow_receipt = switch(b_wallet_try_escrow_general_valid){case(#ok(val)){ + ?{ + buyer = val.receipt.buyer; + seller = val.receipt.seller; + token = dfx_token_spec; + token_id = ""; + amount = 40 * 10 ** 8; //20 icp for two + }};case(#err(err)){ + D.print("THROW ----------------- failed b register for 2" # debug_show(b_wallet_try_escrow_general_valid) ); + throw(Error.reject("THROW ----------------- failed to get escrow for a payment in testRedeem for bad lock"))}}}); + + + let b_wallet_registration_after_four = await sale_canister.get_registration_sale_nft_origyn(Principal.fromActor(b_wallet)); + + //regist d for 2 with non-additive + + let dRedeem_payment_2 = await d_wallet.send_ledger_payment(dfx_ledger, (40 * 10 ** 8) + 800000, Principal.fromActor(canister)); + + let d_wallet_try_escrow_general_valid = await d_wallet.try_escrow_general_staged(Principal.fromActor(canister), 
Principal.fromActor(canister), dfx_ledger, switch(dRedeem_payment_2){case(#ok(val)){?Nat64.toNat(val)};case(#err(err)){?0};}, 40 * 10 ** 8, ?dfx_token_spec, ?lock_until); + + let d_wallet_try_register_for_four = await d_wallet.try_sale_registration(Principal.fromActor(sale_canister), {principal = Principal.fromActor(d_wallet); max_desired = 2; escrow_receipt = switch(d_wallet_try_escrow_general_valid){case(#ok(val)){ + ?{ + buyer = val.receipt.buyer; + seller = val.receipt.seller; + token = dfx_token_spec; + token_id = ""; + amount = 40 * 10 ** 8; //40 icp for four but should only get two + }};case(#err(err)){ + D.print("THROW ----------------- failed d register for 4" # debug_show(d_wallet_try_escrow_general_valid) ); + + throw(Error.reject("THROW ----------------- failed to get escrow for a payment in testRedeem for bad lock"))}}}); + + + let d_wallet_registration_after_four = await sale_canister.get_registration_sale_nft_origyn(Principal.fromActor(d_wallet)); + + //register e for 2 with general + let eRedeem_payment_2 = await e_wallet.send_ledger_payment(dfx_ledger, (20 * 10 ** 8) + 400000, Principal.fromActor(canister)); + + let e_wallet_try_escrow_general_valid = await e_wallet.try_escrow_general_staged(Principal.fromActor(canister), Principal.fromActor(canister), dfx_ledger, switch(eRedeem_payment_2){case(#ok(val)){?Nat64.toNat(val)};case(#err(err)){?0};}, 20 * 10 ** 8, ?dfx_token_spec, ?lock_until); + + let e_wallet_try_register_for_two = await e_wallet.try_sale_registration(Principal.fromActor(sale_canister), { principal = Principal.fromActor(e_wallet); max_desired = 2; escrow_receipt = switch(e_wallet_try_escrow_general_valid){case(#ok(val)){ + ?{ + buyer = val.receipt.buyer; + seller = val.receipt.seller; + token = dfx_token_spec; + token_id = ""; + amount = 20 * 10 ** 8; //20 icp for two + }};case(#err(err)){ + D.print("THROW ----------------- failed e register for 2" # debug_show(e_wallet_try_escrow_general_valid) ); + throw(Error.reject("THROW ----------------- failed e register for 2"))}}}); + + let e_wallet_registration_after_two = await sale_canister.get_registration_sale_nft_origyn(Principal.fromActor(e_wallet)); + + + //total 10 registrations for 6 items + + + //advance time + + let advancer = await sale_canister.__advance_time(allocation_date + 1); + + //assure allocation is made + //ways to assure this + //make a new registration? + //make a new allocation? + //redeem an allocation?
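+          // A brief sketch of the verification below (comments only; `wallet_principal`, `reg`,
+          // `item`, and `bal` are illustrative placeholders): each wallet's allocation is read back,
+          // every allocated token is claimed, and NFT balances are then compared to the expected totals:
+          //   let reg = await sale_canister.get_registration_sale_nft_origyn(wallet_principal);
+          //   switch(reg){ case(#ok(r)){ for(item in r.allocation.vals()){
+          //     ignore await sale_canister.execute_claim_sale_nft_origyn(item.token_id); }}; case(#err(e)){}; };
+          //   let bal = await canister.balance_of_nft_origyn(#principal(wallet_principal));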
+ + let c_allocate_empty = await c_wallet.try_sale_nft_allocation(Principal.fromActor(sale_canister),{ + principal = Principal.fromActor(c_wallet); + number_to_allocate = 1; + token = ?dfx_token_spec; + }); + + let a_wallet_registration_after_allocation = await sale_canister.get_registration_sale_nft_origyn(Principal.fromActor(a_wallet)); + let b_wallet_registration_after_allocation = await sale_canister.get_registration_sale_nft_origyn(Principal.fromActor(b_wallet)); + let d_wallet_registration_after_allocation = await sale_canister.get_registration_sale_nft_origyn(Principal.fromActor(d_wallet)); + let e_wallet_registration_after_allocation = await sale_canister.get_registration_sale_nft_origyn(Principal.fromActor(e_wallet)); + + + + //claim + switch( + a_wallet_registration_after_allocation, + b_wallet_registration_after_allocation, + d_wallet_registration_after_allocation, + e_wallet_registration_after_allocation){ + case(#ok(a),#ok(b),#ok(d),#ok(e)){ + for(thisitem in a.allocation.vals()){ + let claim_result = await sale_canister.execute_claim_sale_nft_origyn(thisitem.token_id); + }; + for(thisitem in b.allocation.vals()){ + let claim_result = await sale_canister.execute_claim_sale_nft_origyn(thisitem.token_id); + }; + for(thisitem in d.allocation.vals()){ + let claim_result = await sale_canister.execute_claim_sale_nft_origyn(thisitem.token_id); + }; + for(thisitem in e.allocation.vals()){ + let claim_result = await sale_canister.execute_claim_sale_nft_origyn(thisitem.token_id); + }; + }; + case(_,_,_,_) { + D.print("THROW ----------------- failed to get registrations" # debug_show(a_wallet_registration_after_allocation,b_wallet_registration_after_allocation,d_wallet_registration_after_allocation,e_wallet_registration_after_allocation) ); + + throw(Error.reject("THROW ----------------- couldnt get the registratons after allocation")); + }; + }; + + //check nft balance + + let a_wallet_balance_after_three = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(a_wallet))); + let b_wallet_balance_after_three = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(b_wallet))); + let d_wallet_balance_after_three = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(d_wallet))); + let e_wallet_balance_after_three = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(e_wallet))); + + + + + //todo: make sure we can't register after the sale_opened_date + + let cRedeem_payment_2 = await c_wallet.send_ledger_payment(dfx_ledger, (20 * 10 ** 8) + 400000, Principal.fromActor(canister)); + + let c_wallet_try_escrow_general_valid = await c_wallet.try_escrow_general_staged(Principal.fromActor(canister), Principal.fromActor(canister), dfx_ledger, switch(cRedeem_payment_2){case(#ok(val)){?Nat64.toNat(val)};case(#err(err)){?0};}, 20 * 10 ** 8, ?dfx_token_spec, ?lock_until); + + + + + + + let c_wallet_try_register_after_sale_date = await c_wallet.try_sale_registration(Principal.fromActor(sale_canister), { principal = Principal.fromActor(c_wallet);max_desired = 1; escrow_receipt = switch(c_wallet_try_escrow_general_valid){case(#ok(val)){ + ?{ + buyer = val.receipt.buyer; + seller = val.receipt.seller; + token = dfx_token_spec; + token_id = ""; + amount = 10 * 10 ** 8; //20 icp for two + }};case(#err(err)){ + D.print("THROW ----------------- failed sale registration c" # debug_show(c_wallet_try_escrow_general_valid) ); + throw(Error.reject("THROW ----------------- failed sale registration c"))}}}); + + let c_wallet_registration_after_sale_date = 
await sale_canister.get_registration_sale_nft_origyn(Principal.fromActor(c_wallet)); + + + //try to allocate nfts --- should be out of inventory + + let c_allocate_empty_2 = await c_wallet.try_sale_nft_allocation(Principal.fromActor(sale_canister),{ + principal = Principal.fromActor(c_wallet); + number_to_allocate = 1; + token = ?dfx_token_spec; + }); + + + + //D.print("running suite test registrations"); + + let suite = S.suite("test registration", [ + S.test("fail if registering before open", switch(f_wallet_try_registration_before_open){case(#ok(res)){"unexpected success" # debug_show(res)};case(#err(err)){ + if(err.number == 5005){ //improper escrwo + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), + S.test("fail if registering no escrow", switch(a_wallet_try_registration_no_escrow){case(#ok(res)){"unexpected success" # debug_show(res)};case(#err(err)){ + if(err.number == 5003){ //improper escrwo + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), + S.test("fail if no lock", switch(a_wallet_try_registration_bad_lock){case(#ok(res)){"unexpected success" # debug_show(res)};case(#err(err)){ + if(err.number == 5002){ //improper escrwo + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), + S.test("fail if not enough tokens", switch(a_wallet_try_registration_low_amount){case(#ok(res)){"unexpected success" # debug_show(res)};case(#err(err)){ + if(err.number == 5003){ //improper escrwo + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), + S.test("can register one item", switch(a_wallet_try_redeem_for_one){case(#ok(res)){ + if(res.max_desired == 1){ + "expected success" + } else { + "unexpected success" # debug_show(res) + }};case(#err(err)){ + "wrong error " # debug_show(err); + };}, M.equals(T.text("expected success"))), + S.test("can get reg afterone item", switch(a_wallet_registration_after_one){case(#ok(res)){ + if(res.max_desired == 1){ + "expected success" + } else { + "unexpected success" # debug_show(res) + }};case(#err(err)){ + "wrong error " # debug_show(err); + };}, M.equals(T.text("expected success"))), + S.test("can register two item replace", switch(a_wallet_try_register_for_two){case(#ok(res)){ + if(res.max_desired == 2){ + "expected success" + } else { + "unexpected success" # debug_show(res) + }};case(#err(err)){ + "wrong error " # debug_show(err); + };}, M.equals(T.text("expected success"))), + S.test("can get reg after two item", switch(a_wallet_registration_after_two){case(#ok(res)){ + if(res.max_desired == 1){ + "expected success" + } else { + "unexpected success" # debug_show(res) + }};case(#err(err)){ + "wrong error " # debug_show(err); + };}, M.equals(T.text("expected success"))), + S.test("can register four item additive", switch(b_wallet_try_register_for_four){case(#ok(res)){ + if(res.max_desired == 4){ + "expected success" + } else { + "unexpected success" # debug_show(res) + }};case(#err(err)){ + "wrong error " # debug_show(err); + };}, M.equals(T.text("expected success"))), + S.test("can get reg after four item b", switch(b_wallet_registration_after_four){case(#ok(res)){ + if(res.max_desired == 4){ + "expected success" + } else { + "unexpected success" # debug_show(res) + }};case(#err(err)){ + "wrong error " # debug_show(err); + };}, M.equals(T.text("expected success"))), + + S.test("can register d four item non additive", 
switch(d_wallet_try_register_for_four){case(#ok(res)){ + if(res.max_desired == 2){ + "expected success" + } else { + "unexpected success" # debug_show(res) + }};case(#err(err)){ + "wrong error " # debug_show(err); + };}, M.equals(T.text("expected success"))), + S.test("can get reg after four item d", switch(d_wallet_registration_after_four){case(#ok(res)){ + if(res.max_desired == 4){ + "expected success" + } else { + "unexpected success" # debug_show(res) + }};case(#err(err)){ + "wrong error " # debug_show(err); + };}, M.equals(T.text("expected success"))), + + S.test("can register e two item non-additive", switch(e_wallet_try_register_for_two){case(#ok(res)){ + if(res.max_desired == 2){ + "expected success" + } else { + "unexpected success" # debug_show(res) + }};case(#err(err)){ + "wrong error " # debug_show(err); + };}, M.equals(T.text("expected success"))), + S.test("can get reg after four item e", switch(e_wallet_registration_after_two){case(#ok(res)){ + if(res.max_desired == 2){ + "expected success" + } else { + "unexpected success" # debug_show(res) + }};case(#err(err)){ + "wrong error " # debug_show(err); + };}, M.equals(T.text("expected success"))), + + S.test("only 6 items allocated", switch( + a_wallet_registration_after_allocation, + b_wallet_registration_after_allocation, + d_wallet_registration_after_allocation, + e_wallet_registration_after_allocation){ + case(#ok(a),#ok(b),#ok(d),#ok(e)){ + if(a.allocation.size() + b.allocation.size() + d.allocation.size() + e.allocation.size() == 6){ + "expected success" + } else { + "unexpected success" # debug_show((a,b,d,e)) + }}; + case(_,_,_,_) { + "wrong error " # debug_show((a_wallet_registration_after_allocation, b_wallet_registration_after_allocation, d_wallet_registration_after_allocation, e_wallet_registration_after_allocation)); + }; + }, M.equals(T.text("expected success"))), + + S.test("only 6 items allocated in balance", switch( + a_wallet_balance_after_three, + b_wallet_balance_after_three, + d_wallet_balance_after_three, + e_wallet_balance_after_three){ + case(#ok(a),#ok(b),#ok(d),#ok(e)){ + if(a.nfts.size() + b.nfts.size() + d.nfts.size() + e.nfts.size() == 6){ + "expected success" + } else { + "unexpected success" # debug_show((a,b,d,e)) + }}; + case(_,_,_,_) { + "wrong error " # debug_show((a_wallet_balance_after_three, b_wallet_balance_after_three, d_wallet_balance_after_three, e_wallet_balance_after_three)); + }; + }, M.equals(T.text("expected success"))), + S.test("cannot register after sale ends", switch(c_wallet_try_register_after_sale_date){case(#ok(res)){"unexpected success" # debug_show(res)};case(#err(err)){ + if(err.number == 5005){ //improper escrwo + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), + S.test("allocation doesnt work inventory cleared during allocation", switch(c_allocate_empty){case(#ok(res)){"unexpected success" # debug_show(res)};case(#err(err)){ + if(err.number == 5004){ //improper escrwo + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), + S.test("allocation doesnt work inventory cleared during allocation", switch(c_allocate_empty_2){case(#ok(res)){"unexpected success" # debug_show(res)};case(#err(err)){ + if(err.number == 5004){ //improper escrow + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), + + ]); + + S.run(suite); + + return #success; + }; + + public shared func testRedeemAllocation() : async {#success; 
#fail : Text} { + D.print("running testRedeemAllocation"); + + let a_wallet = await TestWalletDef.test_wallet(); + let b_wallet = await TestWalletDef.test_wallet(); + let c_wallet = await TestWalletDef.test_wallet(); + + //D.print("have wallets"); + + //fund wallets + + let dfx : DFXTypes.Service = actor(Principal.toText(dfx_ledger)); + let funding_result = await dfx.transfer({ + to = Blob.fromArray(AccountIdentifier.addHash(AccountIdentifier.fromPrincipal(Principal.fromActor(a_wallet), null))); + fee = {e8s = 200_000}; + memo = 1; + from_subaccount = null; + created_at_time = null; + amount = {e8s = 1000 * 10 ** 8};}); + + //D.print("funding a" # debug_show(funding_result)); + + let funding_result_2 = await dfx.transfer({ + to = Blob.fromArray(AccountIdentifier.addHash(AccountIdentifier.fromPrincipal(Principal.fromActor(b_wallet), null))); + fee = {e8s = 200_000}; + memo = 1; + from_subaccount = null; + created_at_time = null; + amount = {e8s = 1000 * 10 ** 8};}); + + let newPrincipal = await g_canister_factory.create({ + owner = Principal.fromActor(this); + storage_space = null; + }); + + let canister : Types.Service = actor(Principal.toText(newPrincipal)); + + + + //D.print("have canister" # debug_show(Principal.fromActor(canister))); + + //D.print("calling stage"); + + let standardStage = await utils.buildStandardNFT("1", canister, Principal.fromActor(canister), 1024, false); + let standardStage2 = await utils.buildStandardNFT("2", canister, Principal.fromActor(canister), 1024, false); + let standardStage3 = await utils.buildStandardNFT("3", canister, Principal.fromActor(canister), 1024, false); + let standardStage4 = await utils.buildStandardNFT("4", canister, Principal.fromActor(canister), 1024, false); + let standardStage5 = await utils.buildStandardNFT("5", canister, Principal.fromActor(canister), 1024, false); + let standardStage6 = await utils.buildStandardNFT("6", canister, Principal.fromActor(canister), 1024, false); + let standardStage7 = await utils.buildStandardNFT("7", canister, Principal.fromActor(canister), 1024, false); + let standardStage8 = await utils.buildStandardNFT("8", canister, Principal.fromActor(canister), 1024, false); + + + let allocation_date = Time.now() + 900000000000; + let lock_until = allocation_date + 900000000000; + //create sales canister + let sale_canister = await Sales.SaleCanister({ + owner = Principal.fromActor(this); + allocation_expiration = 900000000000; + nft_gateway = ?Principal.fromActor(canister); + sale_open_date = ?(allocation_date); // in 15 minutes + registration_date = null; + end_date = null; + required_lock_date = ?(lock_until); //15 minutes past allocation date + + }); + + //D.print("sales canister is " # debug_show(Principal.fromActor(sale_canister), Principal.fromActor(canister))); + let current_manager = switch(await canister.collection_nft_origyn(null)){ + case(#err(err)){[]}; + case(#ok(val)){ + switch(val.managers){ + case(null){[]}; + case(?val){val}; + }; + }; + }; + //D.print("current manager add" # debug_show(current_manager, Principal.fromActor(canister))); + let manager_add = await canister.collection_update_nft_origyn(#UpdateManagers([Principal.fromActor(sale_canister)])); + //D.print("manager add" # debug_show(manager_add)); + + + D.print("adding add_unminted_1"); + + let add_unminted_1 = await sale_canister.manage_nfts_sale_nft_origyn([ + #add({ + canister = Principal.fromActor(canister); + token_id = "1"; + }), + #add({ + canister = Principal.fromActor(canister); + token_id = "2"; + }), + #add({ + canister = 
Principal.fromActor(canister); + token_id = "3"; + }), + #add({ + canister = Principal.fromActor(canister); + token_id = "4"; + }), + #add({ + canister = Principal.fromActor(canister); + token_id = "5"; + }), + #add({ + canister = Principal.fromActor(canister); + token_id = "6"; + }), + ] + ); + + D.print(debug_show(add_unminted_1)); + + //create a defalut group with an allocation of 2 + + let defaultGroup = await sale_canister.manage_group_sale_nft_origyn([#update({ + namespace = ""; //default namespace + members = null; + pricing = ?[#cost_per{ + amount = 1000000000; + token = #ic({ + canister = dfx_ledger; + fee = 200000 : Nat; + symbol = "OGY"; + decimals = 8 : Nat; + standard = #Ledger; + }); + }]; + allowed_amount = ?5; + tier = 0; + additive = true; + } + )]); + + + D.print("making reservation"); + + //creat a default reservation + let reserve = await sale_canister.manage_reservation_sale_nft_origyn([ + #add({ + namespace = "default"; + reservation_type = #Groups([""]); + exclusive = false; + nfts = ["1","2","3","4","5","6"]; + }) + ]); + + + D.print("allocating 1"); + + //allocate 5 nfts + let allocate_1 = await a_wallet.try_sale_nft_allocation(Principal.fromActor(sale_canister),{ + principal = Principal.fromActor(a_wallet); + number_to_allocate = 5; + token = ?dfx_token_spec; + }); + + D.print("allocating 1" # debug_show(allocate_1)); + + //check allocation balances + + let balance_check_1 = await sale_canister.get_allocation_sale_nft_origyn(Principal.fromActor(a_wallet)); + + D.print("balance_check_1 1" # debug_show(balance_check_1)); + + //have b try to redeem the allocation should fail + + let bRedeem_payment = await b_wallet.send_ledger_payment(dfx_ledger, (10 * 10 ** 8) + 200000, Principal.fromActor(canister)); + + let b_wallet_try_escrow_general = await b_wallet.try_escrow_general_staged(Principal.fromActor(canister), Principal.fromActor(canister), dfx_ledger, switch(bRedeem_payment){case(#ok(val)){?Nat64.toNat(val)};case(#err(err)){?0};}, 10 * 10 ** 8, ?dfx_token_spec, ?lock_until); + + let b_wallet_allocation_attempt = await b_wallet.try_sale_nft_redeem(Principal.fromActor(sale_canister), { escrow_receipt = switch(b_wallet_try_escrow_general){case(#ok(val)){val.receipt};case(#err(err)){throw(Error.reject("THROW ----------------- failed to get escrow for b payment in testRedeem"))}}}); + + //have a redeem thier allocation + D.print("fake escrow"); + + //redeem with fake escrow + let a_wallet_try_redeem_no_escrow = await a_wallet.try_sale_nft_redeem(Principal.fromActor(sale_canister), { escrow_receipt = { + buyer = #principal(Principal.fromActor(a_wallet)); + seller = #principal(Principal.fromActor(canister)); + token = dfx_token_spec; + token_id = ""; + amount = 10 * 10 ** 8; //one icp for one token + }}); + + D.print("a_wallet_try_redeem_no_escrow 1" # debug_show(a_wallet_try_redeem_no_escrow)); + + + + //create a new payment with lock + + let aRedeem_payment_2 = await a_wallet.send_ledger_payment(dfx_ledger, (30 * 10 ** 8) + 600000, Principal.fromActor(canister)); + + D.print("attempted payment " # debug_show(aRedeem_payment_2)); + + let a_wallet_try_escrow_general_valid = await a_wallet.try_escrow_general_staged(Principal.fromActor(canister), Principal.fromActor(canister), dfx_ledger, switch(aRedeem_payment_2){case(#ok(val)){?Nat64.toNat(val)};case(#err(err)){?0};}, 30 * 10 ** 8, ?dfx_token_spec, null); + + //redeem escrow with not enough payment for at least 1 NFT + + let a_wallet_try_redeem_low_amount = await 
a_wallet.try_sale_nft_redeem(Principal.fromActor(sale_canister), { escrow_receipt = switch(a_wallet_try_escrow_general_valid){case(#ok(val)){ + { + buyer = val.receipt.buyer; + seller = val.receipt.seller; + token = dfx_token_spec; + token_id = ""; + amount = 9 * 10 ** 8; //one token short + + }};case(#err(err)){ + D.print("THROW ----------------- failed to get escrow for a payment in testRedeem for a_wallet_try_redeem_low_amount" # debug_show(err)); + throw(Error.reject("THROW ----------------- failed to get escrow for a payment in testRedeem for a_wallet_try_redeem_low_amount"))}}}); + + D.print("a_wallet_try_redeem_low_amount 1" # debug_show(a_wallet_try_redeem_low_amount)); + + + //redeem escrow for one NFT + + let a_wallet_try_redeem_for_one = await a_wallet.try_sale_nft_redeem(Principal.fromActor(sale_canister), { escrow_receipt = switch(a_wallet_try_escrow_general_valid){case(#ok(val)){ + { + buyer = val.receipt.buyer; + seller = val.receipt.seller; + token = dfx_token_spec; + token_id = ""; + amount = 10 * 10 ** 8; //one icp for one + + }};case(#err(err)){D.print("THROW ----------------- failed to get escrow for a payment in testRedeem for a_wallet_try_redeem_for_one"); + + throw( + + Error.reject("THROW ----------------- failed to get escrow for a payment in testRedeem for a_wallet_try_redeem_for_one"))}}}); + + D.print("a good redeem" # debug_show(a_wallet_try_redeem_for_one)); + //check that allocation is updated + + let a_wallet_allocation_after_one = await sale_canister.get_allocation_sale_nft_origyn(Principal.fromActor(a_wallet)); + + + D.print("a_wallet_allocation_after_one" # debug_show(a_wallet_allocation_after_one)); + + let a_wallet_balance_after_one = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(a_wallet))); + + D.print("a_wallet_balance_after_one" # debug_show(a_wallet_balance_after_one)); + + //redeem escrow for the two more of the NFTs + + let a_wallet_try_redeem_for_two = await a_wallet.try_sale_nft_redeem(Principal.fromActor(sale_canister), { escrow_receipt = switch(a_wallet_try_escrow_general_valid){case(#ok(val)){ + { + buyer = val.receipt.buyer; + seller = val.receipt.seller; + token = dfx_token_spec; + token_id = ""; + amount = 20 * 10 ** 8; //20 icp for two + + }};case(#err(err)){ + D.print("THROW ----------------- failed to get escrow for a payment in testRedeem for a_wallet_try_redeem_for_two"); + throw( + + + Error.reject("THROW ----------------- failed to get escrow for a payment in testRedeem for a_wallet_try_redeem_for_two"))}}}); + + D.print("a_wallet_try_redeem_for_two 1" # debug_show(a_wallet_try_redeem_for_two)); + + D.print("a_wallet_try_redeem_for_two" # debug_show(a_wallet_try_redeem_for_two)); + + let a_wallet_allocation_after_three = await sale_canister.get_allocation_sale_nft_origyn(Principal.fromActor(a_wallet)); + + D.print("a_wallet_allocation_after_three" # debug_show(a_wallet_allocation_after_three)); + + + let a_wallet_balance_after_three = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(a_wallet))); + //redeem escrow with enough for 10 NFT...make sure only the last two ae allocated + + D.print("a_wallet_balance_after_three" # debug_show(a_wallet_balance_after_three)); + + + let a_wallet_try_redeem_for_ten = await a_wallet.try_sale_nft_redeem(Principal.fromActor(sale_canister), { escrow_receipt = switch(a_wallet_try_escrow_general_valid){case(#ok(val)){ + { + buyer = val.receipt.buyer; + seller = val.receipt.seller; + token = dfx_token_spec; + token_id = ""; + amount = 100 * 10 ** 8; //10 icp for 
ten + + }};case(#err(err)){ + D.print("THROW ----------------- failed to get escrow for a payment in testRedeem for a_wallet_try_redeem_for_ten"); + throw( + + Error.reject("THROW ----------------- failed to get escrow for a payment in testRedeem for a_wallet_try_redeem_for_ten"))}}}); + + //check that allocation is deleted + + D.print("a_wallet_try_redeem_for_ten" # debug_show(a_wallet_try_redeem_for_ten)); + + + let a_wallet_allocation_after_ten = await sale_canister.get_allocation_sale_nft_origyn(Principal.fromActor(a_wallet)); + + + D.print("a_wallet_allocation_after_ten" # debug_show(a_wallet_allocation_after_ten)); + + + let a_wallet_balance_after_ten = await canister.balance_of_nft_origyn(#principal(Principal.fromActor(a_wallet))); + + D.print("a_wallet_balance_after_ten" # debug_show(a_wallet_balance_after_ten)); + + //advance time + + let advancer = await sale_canister.__advance_time(allocation_date + 1); + + //check allocation balance + + let a_wallet_allocation_after_expiration = await sale_canister.get_allocation_sale_nft_origyn(Principal.fromActor(a_wallet)); + + D.print("a_wallet_allocation_after_expiration" # debug_show(a_wallet_allocation_after_expiration)); + + + //allocate an nft + + let allocate_2 = await a_wallet.try_sale_nft_allocation(Principal.fromActor(sale_canister),{ + principal = Principal.fromActor(a_wallet); + number_to_allocate = 1; + token = ?dfx_token_spec; + }); + + D.print("allocate_2" # debug_show(allocate_2)); + + + //advance time passed escrow + + let advancer2 = await sale_canister.__advance_time(allocation_date + 1 + 900000000000 + 1); + + + //try to redeem escrow for expired allocation + + let a_wallet_try_redeem_for_expired = await a_wallet.try_sale_nft_redeem(Principal.fromActor(sale_canister), { escrow_receipt = switch(a_wallet_try_escrow_general_valid){case(#ok(val)){ + { + buyer = val.receipt.buyer; + seller = val.receipt.seller; + token = dfx_token_spec; + token_id = ""; + amount = 10 * 10 ** 8; + + }};case(#err(err)){ + //D.print("THROW ----------------- failed to get escrow for a payment in testRedeem for a_wallet_try_redeem_for_expired"); + throw(Error.reject("THROW ----------------- failed to get escrow for a payment in testRedeem for bad lock"))}}}); + + + + + + + //D.print("running suite test redeem"); + + D.print("a_wallet_try_redeem_for_expired" # debug_show(a_wallet_try_redeem_for_expired)); + + + + let suite = S.suite("test allocations", [ + + S.test("fail if redeeming with no allocation", switch(b_wallet_allocation_attempt){case(#ok(res)){"unexpected success" # debug_show(res)};case(#err(err)){ + if(err.number == 5001){ //allocation does not exist + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), + S.test("fail if redeeming with a non existant escrow", switch(a_wallet_try_redeem_no_escrow){case(#ok(res)){ + //D.print("unexpected success"# debug_show(res)); + if(res.nfts.size() == 1){ + switch(res.nfts[0].transaction){ + case(#ok(trxres)){ + "unexpected success " # debug_show(res); + }; + case(#err(err)){ + if(err.number == 64){ //bad transaction error + "correct error"; + } else { + "unexpected error" # debug_show((err, res)); + }; + }; + + } + } else { + "unexpected size " # debug_show(res); + }; + }; + + case(#err(err)){ + //D.print("unexpected err"# debug_show(err)); + if(err.number == 5001){ //escrow does not exist + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct error"))), + + S.test("fail if the payment was too low", 
switch(a_wallet_try_redeem_low_amount){case(#ok(res)){"unexpected success" # debug_show(res)};case(#err(err)){ + if(err.number == 5003){ //improper low payment + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), + + S.test("can allocate one item", switch(a_wallet_try_redeem_for_one){case(#ok(res)){ + if(res.nfts.size() == 1){ + + "expected success" + + } else { + "unexpected success" # debug_show(res) + }};case(#err(err)){ + "wrong error " # debug_show(err); + };}, M.equals(T.text("expected success"))), + S.test("can allocate one item", switch(a_wallet_balance_after_one){case(#ok(res)){ + if(res.nfts.size() == 1){ + + "expected success" + + } else { + "unexpected success" # debug_show(res) + }};case(#err(err)){ + "wrong error " # debug_show(err); + };}, M.equals(T.text("expected success"))), + S.test("allocation balance after one items", switch(a_wallet_allocation_after_one){case(#ok(res)){ + if(res.allocation_size == 4){ + + "expected success" + + } else { + "unexpected success" # debug_show(res) + }};case(#err(err)){ + "wrong error " # debug_show(err); + };}, M.equals(T.text("expected success"))), + + S.test("can allocate additive two items", switch(a_wallet_allocation_after_three){case(#ok(res)){ + if(res.allocation_size == 2){ + + "expected success" + + } else { + "unexpected success" # debug_show(res) + }};case(#err(err)){ + "wrong error " # debug_show(err); + };}, M.equals(T.text("expected success"))), + S.test("can allocate additive two items", switch(a_wallet_try_redeem_for_two){case(#ok(res)){ + if(res.nfts.size() == 2){ + + "expected success" + + } else { + "unexpected success" # debug_show(res) + }};case(#err(err)){ + "wrong error " # debug_show(err); + };}, M.equals(T.text("expected success"))), + S.test("has 3", switch(a_wallet_balance_after_three){case(#ok(res)){ + if(res.nfts.size() == 3){ + + "expected success" + + } else { + "unexpected success" # debug_show(res) + }};case(#err(err)){ + "wrong error " # debug_show(err); + };}, M.equals(T.text("expected success"))), + S.test("cannot over allocate ten items", switch(a_wallet_try_redeem_for_ten){case(#ok(res)){ + if(res.nfts.size() == 2){ + + "expected success" + + } else { + "unexpected success" # debug_show(res) + }};case(#err(err)){ + "wrong error " # debug_show(err); + };}, M.equals(T.text("expected success"))), + S.test("has 3", switch(a_wallet_balance_after_ten){case(#ok(res)){ + if(res.nfts.size() == 3){ + + "expected success" + + } else { + "unexpected success" # debug_show(res) + }};case(#err(err)){ + "wrong error " # debug_show(err); + };}, M.equals(T.text("expected success"))), + + + ]); + + S.run(suite); + + return #success; + }; + + public shared func testAllocation() : async {#success; #fail : Text} { + //D.print("running testMarketTransfer"); + + D.print("in test allocations"); + + let a_wallet = await TestWalletDef.test_wallet(); + let b_wallet = await TestWalletDef.test_wallet(); + let c_wallet = await TestWalletDef.test_wallet(); + let d_wallet = await TestWalletDef.test_wallet(); + + //D.print("have wallets"); + + let newPrincipal = await g_canister_factory.create({ + owner = Principal.fromActor(this); + storage_space = null; + }); + + let canister : Types.Service = actor(Principal.toText(newPrincipal)); + + //D.print("have canister"); + + //D.print("calling stage"); + + let standardStage = await utils.buildStandardNFT("1", canister, Principal.fromActor(canister), 1024, false); + let standardStage2 = await utils.buildStandardNFT("2", canister, 
Principal.fromActor(canister), 1024, false); + let standardStage3 = await utils.buildStandardNFT("3", canister, Principal.fromActor(canister), 1024, false); + let standardStage4 = await utils.buildStandardNFT("4", canister, Principal.fromActor(canister), 1024, false); + let standardStage5 = await utils.buildStandardNFT("5", canister, Principal.fromActor(canister), 1024, false); + let standardStage6 = await utils.buildStandardNFT("6", canister, Principal.fromActor(canister), 1024, false); + let standardStage7 = await utils.buildStandardNFT("7", canister, Principal.fromActor(canister), 1024, false); + let standardStage8 = await utils.buildStandardNFT("8", canister, Principal.fromActor(canister), 1024, false); + + //create sales canister + let sale_canister = await Sales.SaleCanister({ + owner = Principal.fromActor(this); + allocation_expiration = 900000000000; + nft_gateway = ?Principal.fromActor(canister); + sale_open_date = null; + registration_date = null; + end_date = null; + required_lock_date = null; + + }); + + let set_time = await sale_canister.__set_time_mode(#test); + let set_time2 = await sale_canister.__advance_time(Time.now()); + + + //D.print("adding auth"); + + //add items as authorized + //NFT-229 + let add_unminted_1 = await sale_canister.manage_nfts_sale_nft_origyn([ + #add({ + canister = Principal.fromActor(canister); + token_id = "1"; + }), + #add({ + canister = Principal.fromActor(canister); + token_id = "2"; + }), + #add({ + canister = Principal.fromActor(canister); + token_id = "3"; + }), + #add({ + canister = Principal.fromActor(canister); + token_id = "4"; + }), + #add({ + canister = Principal.fromActor(canister); + token_id = "5"; + }), + #add({ + canister = Principal.fromActor(canister); + token_id = "6"; + }), + #add({ + canister = Principal.fromActor(canister); + token_id = "7"; + }), + #add({ + canister = Principal.fromActor(canister); + token_id = "8"; + }), + ] + ); + + //create a defalut group with an allocation of 2 + + let defaultGroup = await sale_canister.manage_group_sale_nft_origyn([#update({ + namespace = ""; //default namespace + members = null; + pricing = ?[#cost_per{ + amount = 1000000000; + token = #ic({ + canister = dfx_ledger; + fee = 200000 : Nat; + symbol = "OGY"; + decimals = 8 : Nat; + standard = #Ledger; + }); + }]; + allowed_amount = ?2; + tier = 0; + additive = true; + } + )]); + + + + //create a specific group with allocation of 2 for b wallet + let bGroup = await sale_canister.manage_group_sale_nft_origyn([#update({ + namespace = "bgroup"; //default namespace + members = ?[Principal.fromActor(b_wallet)]; + pricing = ?[#cost_per{ + amount = 1000000000; + token = #ic({ + canister = dfx_ledger; + fee = 200000; + symbol = "OGY"; + decimals = 8; + standard = #Ledger; + }) + }]; + allowed_amount = ?2; + additive = true; + tier = 1; + } + )]); + + //create a specific group with allocation of 2 for b wallet + let cGroup = await sale_canister.manage_group_sale_nft_origyn([#update({ + namespace = "cgroup"; //default namespace + members = ?[Principal.fromActor(c_wallet)]; + pricing = ? 
[#cost_per{ + amount = 1000000000; + token = #ic({ + canister = dfx_ledger; + fee = 200000; + symbol = "OGY"; + decimals = 8; + standard = #Ledger; + }) + }]; + allowed_amount = ?2; + additive = false; + tier = 2 + } + )]); + + //creat a default reservation + let reserve = await sale_canister.manage_reservation_sale_nft_origyn([ + #add({ + namespace = "default"; + reservation_type = #Groups([""]); + exclusive = false; + nfts = ["1","2","3","4","5","6","7","8"]; + }) + ]); + + + + //set allocation expiration to 15 minutes + + //D.print("allocating details"); + + + //allocate 0 nfts should fail + let allocate_0 = await a_wallet.try_sale_nft_allocation(Principal.fromActor(sale_canister),{ + principal = Principal.fromActor(a_wallet); + number_to_allocate = 0; + token = ?dfx_token_spec; + }); + + //D.print(debug_show(allocate_0)); + + //allocate 4 nfts should get 2 + let allocate_1 = await a_wallet.try_sale_nft_allocation(Principal.fromActor(sale_canister),{ + principal = Principal.fromActor(a_wallet); + number_to_allocate = 4; + token = ?dfx_token_spec; + }); + + D.print("allocate 1 " #debug_show(allocate_1)); + + //try to allocate 6 nfts, should allocate 4 (defalut + special group) + let allocate_2 = await b_wallet.try_sale_nft_allocation(Principal.fromActor(sale_canister),{ + principal = Principal.fromActor(b_wallet); + number_to_allocate = 6; + token = ?dfx_token_spec; + }); + + D.print("allocate 2 " #debug_show(allocate_2)); + + //try to allocate 6 more, should allocate 2 since c group is not additive + let allocate_3 = await c_wallet.try_sale_nft_allocation(Principal.fromActor(sale_canister),{ + principal = Principal.fromActor(c_wallet); + number_to_allocate = 6; + token = ?dfx_token_spec; + }); + + //D.print("allocate 3 " #debug_show(allocate_3)); + + + //try allocate 10 more, should fail + let allocate_4 = await c_wallet.try_sale_nft_allocation(Principal.fromActor(sale_canister),{ + principal = Principal.fromActor(a_wallet); + number_to_allocate = 10; + token = ?dfx_token_spec; + }); + + //D.print("allocate 4 " #debug_show(allocate_4)); + + //try allocate some for d but all have been allocated + let allocate_5 = await d_wallet.try_sale_nft_allocation(Principal.fromActor(sale_canister),{ + principal = Principal.fromActor(d_wallet); + number_to_allocate = 10; + token = ?dfx_token_spec; + }); + //D.print("allocate 5 " #debug_show(allocate_5)); + + //check allocation balances + //D.print("checking balances details"); + + let balance_check_1 = await sale_canister.get_allocation_sale_nft_origyn(Principal.fromActor(a_wallet)); + let balance_check_2 = await sale_canister.get_allocation_sale_nft_origyn(Principal.fromActor(b_wallet)); + let balance_check_3 = await sale_canister.get_allocation_sale_nft_origyn(Principal.fromActor(c_wallet)); + + + + //items are returned to the pool after expiration + let expiration = switch(allocate_1){ + case(#ok(val)){ + val.expiration + }; + case(#err(err)){ + //D.print("THROW ----------------- we cant simulate time because the allocation didnt work in test_allocation " # debug_show(err)); + throw(Error.reject("THROW ----------------- we cant simulate time because the allocation didnt work in test_allocation " # debug_show(err))); + } + }; + //D.print("advancing time " # debug_show(Time.now(), expiration)); + + let advancer = await sale_canister.__advance_time(expiration + 1); + + + let expired_check_1 = await sale_canister.get_allocation_sale_nft_origyn(Principal.fromActor(a_wallet)); + let expired_check_2 = await 
sale_canister.get_allocation_sale_nft_origyn(Principal.fromActor(b_wallet)); + let expired_check_3 = await sale_canister.get_allocation_sale_nft_origyn(Principal.fromActor(c_wallet)); + + //can allocate again + + //allocate 4 nfts should get 2 + let allocate__retry_1 = await a_wallet.try_sale_nft_allocation(Principal.fromActor(sale_canister),{ + principal = Principal.fromActor(a_wallet); + number_to_allocate = 1; + token = ?dfx_token_spec; + }); + + //D.print("running suite test allocations"); + + let suite = S.suite("test allocations", [ + + S.test("allocate 0 items should fail", switch(allocate_0){case(#ok(res)){ + + "unexpected success" # debug_show(res) + };case(#err(err)){ + if(err.number == 5000){ //improper allocation + "correct error"; + } else { + "wrong error " # debug_show(err); + } + + };}, M.equals(T.text("correct error"))), //NFT-235, NFT-237 + S.test("can allocate default items", switch(allocate_1){case(#ok(res)){ + if(res.allocation_size == 2){ + + "expected success" + + } else { + "unexpected success" # debug_show(res) + }};case(#err(err)){ + "wrong error " # debug_show(err); + };}, M.equals(T.text("expected success"))), //NFT-235 + S.test("can allocate additive items", switch(allocate_2){case(#ok(res)){ + if(res.allocation_size == 4){ + + "expected success" + + } else { + "unexpected success" # debug_show(res) + }};case(#err(err)){ + "wrong error " # debug_show(err); + };}, M.equals(T.text("expected success"))), //NFT-235 + S.test("can allocate non additive items", switch(allocate_3){case(#ok(res)){ + if(res.allocation_size == 2){ + + "expected success" + + } else { + "unexpected success" # debug_show(res) + }};case(#err(err)){ + "wrong error " # debug_show(err); + };}, M.equals(T.text("expected success"))), //NFT-236 + + S.test("fail if no nfts available to allocate", switch(allocate_5){case(#ok(res)){"unexpected success" # debug_show(res)};case(#err(err)){ + if(err.number == 5004){ // + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), //NFT-239 + + S.test("a has 2", switch(balance_check_1){case(#ok(res)){ + if(res.allocation_size == 2){ + + "expected success" + + } else { + "unexpected success" # debug_show(res) + }};case(#err(err)){ + "wrong error " # debug_show(err); + };}, M.equals(T.text("expected success"))), + S.test("b has 4", switch(balance_check_2){case(#ok(res)){ + if(res.allocation_size == 4){ + + "expected success" + + } else { + "unexpected success" # debug_show(res) + }};case(#err(err)){ + "wrong error " # debug_show(err); + };}, M.equals(T.text("expected success"))), + S.test("c has 2", switch(balance_check_3){case(#ok(res)){ + if(res.allocation_size == 2){ + + "expected success" + + } else { + "unexpected success" # debug_show(res) + }};case(#err(err)){ + "wrong error " # debug_show(err); + };}, M.equals(T.text("expected success"))), + + + S.test("a has expired", switch(expired_check_1){case(#ok(res)){"unexpected success" # debug_show(res)};case(#err(err)){ + if(err.number == 5001){ // + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), + + + S.test("b has expired", switch(expired_check_2){case(#ok(res)){"unexpected success" # debug_show(res)};case(#err(err)){ + if(err.number == 5001){ // + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), + + S.test("c has expired", switch(expired_check_3){case(#ok(res)){"unexpected success" # debug_show(res)};case(#err(err)){ + if(err.number == 
5001){ // + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), + S.test("can do a new allocation after expiration", switch(allocate__retry_1){case(#ok(res)){ + if(res.allocation_size == 1){ + + "expected success" + + } else { + "unexpected success" # debug_show(res) + }};case(#err(err)){ + "wrong error " # debug_show(err); + };}, M.equals(T.text("expected success"))), //NFT-236 + + + ]); + + S.run(suite); + + return #success; + }; + + public shared func testManagement() : async {#success; #fail : Text} { + D.print("running testManagement"); + + + + //create sales canister + let sale_canister = await Sales.SaleCanister({ + owner = Principal.fromActor(this); + allocation_expiration = 900000000000; + nft_gateway = null; + sale_open_date = null; + registration_date = null; + end_date = null; + required_lock_date = null; + }); + + //D.print("have canister"); + + + let set_canister_gateway = await sale_canister.manage_sale_nft_origyn(#UpdateNFTGateway(?Principal.fromActor(this))); + + D.print("have set_canister_gateway"); + + let set_canister_expiration = await sale_canister.manage_sale_nft_origyn(#UpdateAllocationExpiration(555)); + + //D.print("have set_canister_expiration"); + + + + let a_now = Time.now(); + + let set_canister_sale_open_date = await sale_canister.manage_sale_nft_origyn(#UpdateSaleOpenDate(?(a_now + 1))); + + //D.print("have set_canister_sale_open_date"); + + let set_canister_registration_date = await sale_canister.manage_sale_nft_origyn(#UpdateRegistrationDate(?(a_now + 2))); + + //D.print("have set_canister_registration_date"); + + let set_canister_end_date = await sale_canister.manage_sale_nft_origyn(#UpdateEndDate(?(a_now + 3))); + + //D.print("getting metrics"); + let canister_metrics = await sale_canister.get_metrics_sale_nft_origyn(); + + //D.print("have metrics"); + let set_canister_sale_open_date_low = await sale_canister.manage_sale_nft_origyn(#UpdateSaleOpenDate(?(1))); + let set_canister_registration_date_low = await sale_canister.manage_sale_nft_origyn(#UpdateRegistrationDate(?(2))); + let set_canister_end_date_low = await sale_canister.manage_sale_nft_origyn(#UpdateEndDate(?(3))); + + let set_canister_sale_open_date_high = await sale_canister.manage_sale_nft_origyn(#UpdateSaleOpenDate(?(10000000000000000000000000000000000))); + let set_canister_registration_date_high = await sale_canister.manage_sale_nft_origyn(#UpdateRegistrationDate(?(210000000000000000000000000000000000))); + let set_canister_end_date_high = await sale_canister.manage_sale_nft_origyn(#UpdateEndDate(?(310000000000000000000000000000000000))); + + let set_canister_owner = await sale_canister.manage_sale_nft_origyn(#UpdateOwner(dfx_ledger)); + + //D.print("getting metrics"); + let owner_change_metrics = await sale_canister.get_metrics_sale_nft_origyn(); + + //D.print("have set_canister_owner"); + + //D.print("running suite test management"); + + let suite = S.suite("test managment", [ + + S.test("can change gateway", switch(canister_metrics){case(#ok(res)){ + if(Option.isSome(res.nft_gateway) == true){ + "expected success"; + } else { + "unexpected success" # debug_show(res) + } + };case(#err(err)){ + + "wrong error " # debug_show(err); + + + };}, M.equals(T.text("expected success"))), + S.test("can change expiration", switch(canister_metrics){case(#ok(res)){ + if(res.allocation_expiration == 555){ + "expected success"; + } else { + "unexpected success" # debug_show(res) + } + };case(#err(err)){ + + "wrong error " # debug_show(err); + + + 
};}, M.equals(T.text("expected success"))), + S.test("can change owner", switch(owner_change_metrics){case(#ok(res)){ + if(res.owner == dfx_ledger){ + "expected success" + } else { + "unexpected success" # debug_show(res) + } + + };case(#err(err)){ + + "wrong error " # debug_show(err); + + + };}, M.equals(T.text("expected success"))), + S.test("can change mintdate", switch(canister_metrics){case(#ok(res)){ + switch(res.sale_open_date){ + case(null){"unexpected success" # debug_show(res)}; + case(?val){ + if(val == a_now + 1){ + "expected success"; + } else { + "unexpected success" # debug_show(res) + } + }; + }; + + + };case(#err(err)){ + + "wrong error " # debug_show(err); + + + };}, M.equals(T.text("expected success"))), + S.test("can change reservation date", switch(canister_metrics){case(#ok(res)){ + switch(res.registration_date){ + case(null){"unexpected success" # debug_show(res)}; + case(?val){ + if(val == a_now + 2){ + "expected success"; + } else { + "unexpected success" # debug_show(res) + } + }; + }; + + + };case(#err(err)){ + + "wrong error " # debug_show(err); + + + };}, M.equals(T.text("expected success"))), + S.test("can change end date", switch(canister_metrics){case(#ok(res)){ + switch(res.end_date){ + case(null){"unexpected success" # debug_show(res)}; + case(?val){ + if(val == a_now + 3){ + "expected success"; + } else { + "unexpected success" # debug_show(res) + } + }; + }; + };case(#err(err)){ + + "wrong error " # debug_show(err); + + + };}, M.equals(T.text("expected success"))), + S.test("fail if canister date set low", switch(set_canister_sale_open_date_low){case(#ok(res)){"unexpected success" # debug_show(res)};case(#err(err)){ + if(err.number == 16){ // + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), + S.test("fail if canister registration set low", switch(set_canister_registration_date_low){case(#ok(res)){"unexpected success" # debug_show(res)};case(#err(err)){ + if(err.number == 16){ // + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), + S.test("fail if canister end date set low", switch(set_canister_end_date_low){case(#ok(res)){"unexpected success" # debug_show(res)};case(#err(err)){ + if(err.number == 16){ // + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), + S.test("fail if canister sale date set high", switch(set_canister_sale_open_date_high){case(#ok(res)){"unexpected success" # debug_show(res)};case(#err(err)){ + if(err.number == 16){ // + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), + S.test("fail if registration date set high", switch(set_canister_registration_date_high){case(#ok(res)){"unexpected success" # debug_show(res)};case(#err(err)){ + if(err.number == 16){ // + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), + S.test("fail if end date set high", switch(set_canister_end_date_high){case(#ok(res)){"unexpected success" # debug_show(res)};case(#err(err)){ + if(err.number == 16){ // + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))) + + + ]); + + S.run(suite); + + return #success; + }; + + public shared func testLoadNFTS() : async {#success; #fail : Text} { + + + D.print("in test loads nft"); + + let a_wallet = await TestWalletDef.test_wallet(); + let b_wallet = await 
TestWalletDef.test_wallet(); + + //D.print("have wallets"); + + let newPrincipal = await g_canister_factory.create({ + owner = Principal.fromActor(this); + storage_space = null; + }); + + let canister : Types.Service = actor(Principal.toText(newPrincipal)); + + //D.print("have canister"); + + D.print("calling stage"); + + let standardStage = await utils.buildStandardNFT("1", canister, Principal.fromActor(canister), 1024, false); + let standardStage2 = await utils.buildStandardNFT("2", canister, Principal.fromActor(canister), 1024, false); + let standardStage3 = await utils.buildStandardNFT("3", canister, Principal.fromActor(canister), 1024, false); + let standardStage4 = await utils.buildStandardNFT("4", canister, Principal.fromActor(canister), 1024, false); + + //mint 2 + let mint_attempt = await canister.mint_nft_origyn("1", #principal(Principal.fromActor(this))); + let mint_attempt2 = await canister.mint_nft_origyn("2", #principal(Principal.fromActor(this))); + + + D.print("minted"); + //create sales canister + let sale_canister = await Sales.SaleCanister({ + owner = Principal.fromActor(this); + allocation_expiration = 900000000000; + nft_gateway = ?Principal.fromActor(canister); + sale_open_date = null; + registration_date = null; + end_date = null; + required_lock_date = null; + + }); + + //add items as an unauthorized user NFT-231 + //D.print("attempting uauth add"); + + let add_minted_unauth = await a_wallet.try_sale_manage_nft(Principal.fromActor(sale_canister),[ + #add({ + canister = Principal.fromActor(canister); + token_id = "1"; + }), + #add({ + canister = Principal.fromActor(canister); + token_id = "2"; + }), + ] + ); + + D.print("adding auth"); + + //add items as authorized + //NFT-229 + let add_minted_1 = await sale_canister.manage_nfts_sale_nft_origyn([ + #add({ + canister = Principal.fromActor(canister); + token_id = "1"; + }), + #add({ + canister = Principal.fromActor(canister); + token_id = "2"; + }), + ] + ); + + + //NFT-230 + let add_unminted_1 = await sale_canister.manage_nfts_sale_nft_origyn([ + #add({ + canister = Principal.fromActor(canister); + token_id = "3"; + }), + #add({ + canister = Principal.fromActor(canister); + token_id = "4"; + }), + ] + ); + + D.print("adding second"); + + // try adding a second time + //nft-233 + let add_unminted_1_second_time = await sale_canister.manage_nfts_sale_nft_origyn([ + #add({ + canister = Principal.fromActor(canister); + token_id = "3"; + }), + #add({ + canister = Principal.fromActor(canister); + token_id = "4"; + }), + ] + ); + + //D.print("getting details"); + + //test inventory + let final_inventory = await sale_canister.get_inventory_sale_nft_origyn(null,null); + + //can get specific item + let specific_inventory = await sale_canister.get_inventory_item_sale_nft_origyn("1"); + + //D.print("running suite load nfts"); + + let suite = S.suite("test loading nfts", [ + + S.test("fail if non owner adds nfts", switch(add_minted_unauth){case(#ok(res)){"unexpected success" # debug_show(res)};case(#err(err)){ + if(err.number == 2000){ // + "correct number" + } else{ + "wrong error " # debug_show(err); + }};}, M.equals(T.text("correct number"))), //NFT-231 + S.test("can add minted items", switch(add_minted_1){case(#ok(res)){ + if(res.total_size == 2 and res.items.size()==2){ + "expected success" + } else { + "unexpected success" # debug_show(res) + }};case(#err(err)){ + "wrong error " # debug_show(err); + };}, M.equals(T.text("expected success"))), //NFT-229 + S.test("can add unminted items", switch(add_unminted_1){case(#ok(res)){ 
+ if(res.total_size == 4 and res.items.size()==2){ + "expected success" + } else { + "unexpected success" # debug_show(res) + }};case(#err(err)){ + "wrong error " # debug_show(err); + };}, M.equals(T.text("expected success"))), //NFT-230 + S.test("cant readd items", switch(add_unminted_1_second_time){case(#ok(res)){ + if(res.total_size == 4 and res.items.size()==2){ + switch(res.items[0]){ + case(#err(err)){ + "expected success" + }; + case(_){ + "unexpected success" # debug_show(res) + }; + } + + } else { + "unexpected success" # debug_show(res) + }};case(#err(err)){ + "wrong error " # debug_show(err); + };}, M.equals(T.text("expected success"))), //NFT-233 + S.test("can get inventory", switch(final_inventory){case(#ok(res)){ + if(res.total_size == 4 and res.items.size()==4){ + "expected success" + } else { + "unexpected success" # debug_show(res) + }};case(#err(err)){ + "wrong error " # debug_show(err); + };}, M.equals(T.text("expected success"))), + S.test("can get inventory item", switch(specific_inventory){case(#ok(res)){ + if(res.token_id == "1"){ + "expected success" + } else { + "unexpected success" # debug_show(res) + }};case(#err(err)){ + "wrong error " # debug_show(err); + };}, M.equals(T.text("expected success"))), + ]); + + S.run(suite); + + return #success; + }; + +} \ No newline at end of file diff --git a/src/tests/test_runner_storage.mo b/src/tests/test_runner_storage.mo new file mode 100644 index 0000000..2008bd5 --- /dev/null +++ b/src/tests/test_runner_storage.mo @@ -0,0 +1,1139 @@ +import AccountIdentifier "mo:principalmo/AccountIdentifier"; +import C "mo:matchers/Canister"; +import Conversion "mo:candy_0_1_10/conversion"; +import DFXTypes "../origyn_nft_reference/dfxtypes"; +import D "mo:base/Debug"; +import Blob "mo:base/Blob"; +import M "mo:matchers/Matchers"; +import StorageCanisterDef "../origyn_nft_reference/storage_canister"; +import NFTUtils "../origyn_nft_reference/utils"; +import Metadata "../origyn_nft_reference/metadata"; +import Nat64 "mo:base/Nat64"; +import Option "mo:base/Option"; +import Principal "mo:base/Principal"; +import Properties "mo:candy_0_1_10/properties"; +import Result "mo:base/Result"; +import Nat "mo:base/Nat"; +import S "mo:matchers/Suite"; +import T "mo:matchers/Testable"; +import TestWalletDef "test_wallet"; +import Time "mo:base/Time"; +import Types "../origyn_nft_reference/types"; +import utils "test_utils"; +//import Instant "test_runner_instant_transfer"; + + +shared (deployer) actor class test_runner(dfx_ledger: Principal, dfx_ledger2: Principal) = this { + let it = C.Tester({ batchSize = 8 }); + + + private var DAY_LENGTH = 60 * 60 * 24 * 10 ** 9; + private var dip20_fee = 200_000; + + private func get_time() : Int{ + return Time.now(); + }; + + private type canister_factory_actor = actor { + create : ({owner: Principal; storage_space: ?Nat}) -> async Principal; + }; + private type storage_factory_actor = actor { + create : ({owner: Principal; storage_space: ?Nat}) -> async Principal; + }; + + private var g_canister_factory : canister_factory_actor = actor(Principal.toText(Principal.fromBlob("\04"))); + private var g_storage_factory: storage_factory_actor = actor(Principal.toText(Principal.fromBlob("\04"))); + + + public shared func test(canister_factory : Principal, storage_factory: Principal) : async {#success; #fail : Text} { + + //let Instant_Test = await Instant.test_runner_instant_transfer(); + //D.print("in storage tezt" # debug_show(canister_factory)); + g_canister_factory := actor(Principal.toText(canister_factory)); + 
g_storage_factory := actor(Principal.toText(storage_factory)); + + let suite = S.suite("test nft", [ + S.test("testAllocation", switch(await testAllocation()){case(#success){true};case(_){false};}, M.equals(T.bool(true))), + S.test("testCollectionLibrary", switch(await testCollectionLibrary()){case(#success){true};case(_){false};}, M.equals(T.bool(true))), + S.test("testLibraryPostMint", switch(await testLibraryPostMint()){case(#success){true};case(_){false};}, M.equals(T.bool(true))), + //S.test("testMarketTransfer", switch(await testMarketTransfer()){case(#success){true};case(_){false};}, M.equals(T.bool(true))), + //S.test("testOwnerTransfer", switch(await testOwnerTransfer()){case(#success){true};case(_){false};}, M.equals(T.bool(true))), + + ]); + S.run(suite); + + return #success; + }; + + public shared func testLibraryPostMint() : async {#success; #fail : Text} { + //D.print("running testAllocation"); + + + //D.print("have new principal " # debug_show(newPrincipal)); + + let newPrincipal_b = await g_canister_factory.create({ + owner = Principal.fromActor(this); + storage_space = ?4096000; + }); + + + //D.print("have new principal " # debug_show(newPrincipal_b)); + + let canister_b : Types.Service = actor(Principal.toText(newPrincipal_b)); + + D.print("making a storage container"); + let storage_b = await StorageCanisterDef.Storage_Canister({ + gateway_canister = Principal.fromActor(canister_b); + network = null; + storage_space = ?4096000; + }); + + let storage_c = await StorageCanisterDef.Storage_Canister({ + gateway_canister = Principal.fromActor(canister_b); + network = null; + storage_space = ?4096000; + }); + + //D.print("have new storage " # debug_show(newPrincipal)); + + let new_storage_request = await canister_b.manage_storage_nft_origyn(#add_storage_canisters([ + (Principal.fromActor(storage_b), 4096000, (0,0,1)) + ])); + + let new_storage_request2 = await canister_b.manage_storage_nft_origyn(#add_storage_canisters([ + (Principal.fromActor(storage_c), 4096000, (0,0,1)) + ])); + + D.print("calling storage stuff"); + + + let standardStage = await utils.buildStandardNFT("1", canister_b, Principal.fromActor(canister_b), 2048000, false); + //let standardStage2 = await utils.buildStandardNFT("2", canister_b, Principal.fromActor(canister_b), 2048000, false); + //let standardStage3 = await utils.buildStandardNFT("3", canister_b, Principal.fromActor(canister_b), 2048000, false); + //let standardStage4 = await utils.buildStandardNFT("4", canister_b, Principal.fromActor(canister_b), 2048000, false); + + //mint 2 + let mint_attempt1 = await canister_b.mint_nft_origyn("1", #principal(Principal.fromActor(this))); + D.print("mint attempt result " # debug_show(mint_attempt1)); + ///let mint_attempt2 = await canister_b.mint_nft_origyn("2", #principal(Principal.fromActor(this))); + //mint 2 + //let mint_attempt3 = await canister_b.mint_nft_origyn("3", #principal(Principal.fromActor(this))); + //let mint_attempt4 = await canister_b.mint_nft_origyn("4", #principal(Principal.fromActor(this))); + + + //try to add a library + + let library_add = await canister_b.stage_library_nft_origyn({ + token_id = "1" : Text; + library_id = "aftermint" : Text; + filedata = #Class([ + {name = "library_id"; value=#Text("aftermint"); immutable= true}, + {name = "title"; value=#Text("page"); immutable= true}, + {name = "location_type"; value=#Text("canister"); immutable= true}, + {name = "location"; value=#Text("https://" # Principal.toText(Principal.fromActor(canister_b)) # ".raw.ic0.app/_/1/_/page"); immutable= 
true}, + {name = "content_type"; value=#Text("text/html; charset=UTF-8"); immutable= true}, + {name = "content_hash"; value=#Bytes(#frozen([0,0,0,0])); immutable= true}, + {name = "size"; value=#Nat(2048000); immutable= true}, + {name = "sort"; value=#Nat(0); immutable= true}, + {name ="read";value = #Text("public"); immutable = false} + ]); + chunk = 0; + content = Conversion.valueToBlob(#Text("after mint")); + }); + + + + let get_gatway_chunks = await canister_b.chunk_nft_origyn({ + token_id = "1"; + library_id = "aftermint"; + chunk = ?0; + }); + + let storage_metrics_canister = await canister_b.storage_info_nft_origyn(); + let storage_metrics_storageb = await storage_b.storage_info_nft_origyn(); + let storage_metrics_storagec = await storage_c.storage_info_nft_origyn(); + + + + + + let suite = S.suite("test library post mint for NFT", [ + + S.test("can stage library after mint", + + switch(library_add){ + case(#ok(res)){ + + + "expected success"; + + + }; + case(#err(err)){ + "wrong error " # debug_show(err); + }; + + }, M.equals(T.text("expected success"))), + S.test("can get loaded aset", + + switch(get_gatway_chunks){ + case(#ok(res)){ + + D.print("gateway chunk" # debug_show(res)); + "expected success"; + + + }; + case(#err(err)){ + "wrong error " # debug_show(err); + }; + + }, M.equals(T.text("expected success"))), + + /* S.test("staging with non enough space should fail", + + switch(standardStage.0){ + case(#ok(res)){ + "unexpected success"; + }; + case(#err(err)){ + if(err.number == 1001){ + "expected error"; + } else { + "wrong error " # debug_show(standardStage); + }; + }; + + }, M.equals(T.text("expected error"))), + S.test("can provide more storage to canister", switch(new_storage_request){ + case(#ok(res)){ + //D.print("found blind market response"); + //D.print(debug_show(res)); + switch(res){ + case(#add_storage_canisters(val)){ + if(val.0 == 8192000 and val.1 == 8192000){ + "space matches" + } else { + "bad size " # debug_show(new_storage_request); + }; + }; + case(_){ + "bad response " # debug_show(new_storage_request); + } + }; + + }; + case(#err(err)){"unexpected error: " # err.flag_point # debug_show(err)};} + , M.equals(T.text("space matches"))), //MKT0007, MKT0014 + + S.test("allocated space should be 8192000", switch(currentStateCanister_b){case( + #ok(res)){ + Nat.toText(switch(res.allocated_storage){case(null){0};case(?val){val}})}; + case(#err(err)){ + "error " # debug_show(err); + }}, M.equals(T.text("8192000"))), //NFT-225 + S.test("staging with enough space should pass", + + switch(standardStage_b.0){ + case(#ok(res)){ + "expected success"; + }; + case(#err(err)){ + + "wrong error " # debug_show(standardStage); + + }; + + }, M.equals(T.text("expected success"))), + S.test("staging library should put first two on gateway", + + switch(standardStage_b.1, standardStage_b.2){ + case(#ok(res), #ok(res2)){ + if((res == Principal.fromActor(canister_b)) and (res2 == Principal.fromActor(canister_b))){ + "expected success"; + } else { + "wrong principals " # debug_show(standardStage_b); + }; + }; + case(_,_){ + "wrong error " # debug_show(standardStage); + }; + + }, M.equals(T.text("expected success"))), + S.test("staging library should put third on storage", + + + switch(standardStage_b.3){ + case(#ok(res)){ + //D.print("testing what should have worked " # debug_show(standardStage_b)); + if(res == Principal.fromActor(storage_b)){ + "expected success"; + } else { + "wrong pricnipal"; + }; + }; + case(#err(err)){ + //D.print("testing what should have worked " # 
debug_show(standardStage_b)); + + "wrong error " # debug_show(standardStage); + + }; + + }, M.equals(T.text("expected success"))), + + S.test("available space on sorage should be 2048000", + + switch(storage_metrics_canister_b_after_stage){ + case(#ok(res)){ + if(res.allocated_storage == 4096000 and + res.available_space == 2048000){ + "expected success"; + }else { + "wrong space " # debug_show(res); + }; + + }; + case(#err(err)){ + + "wrong error " # debug_show(standardStage); + + }; + + }, M.equals(T.text("expected success"))), + S.test("available space on gateway should be 0", + + switch(gateway_metrics_canister_b_after_stage){ + case(#ok(res)){ + if(res.allocated_storage == 4096000 and + res.available_space == 0){ + "expected success"; + } else { + "wrong space " # debug_show(res); + }; + }; + case(#err(err)){ + + "wrong error " # debug_show(standardStage); + + }; + + }, M.equals(T.text("expected success"))), + S.test("collection info should have correct info", + + switch(currentStateCanister_b){ + case(#ok(res)){ + switch(res.allocated_storage, res.available_space){ + case(?res1, ?res2){ + if(res1 == 8192000 and + res2 == 2048000){ + "expected success"; + } else { + "nope " + }; + }; + case(_,_){ + "strange null"; + }; + }; + }; + case(#err(err)){ + + "wrong error " # debug_show(standardStage); + + }; + + }, M.equals(T.text("expected success"))), + + S.test("can get chunks from gateway", + + switch(get_gatway_chunks){ + case(#ok(res)){ + switch(res){ + case(#remote(redirect)){"unexpected remote"}; + case(#chunk(res)){ + if(Blob.equal(res.content, Blob.fromArray(Conversion.valueToBytes(#Text("hello world"))))){ + "hello world"; + } else { + "wrong content"; + }; + }; + }; + }; + case(#err(err)){ + + "wrong content " # debug_show(get_gatway_chunks); + + }; + + }, M.equals(T.text("hello world"))), + S.test("do get pointer for storage", + + switch(get_hidden_chunks){ + case(#ok(res)){ + switch(res){ + case(#remote(remote_data)){ + if(remote_data.canister == Principal.fromActor(storage_b) and + remote_data.args.library_id == "hidden" and + remote_data.args.token_id == "1" and + (switch(remote_data.args.chunk){case(?val){val};case(null){9999}}) == 0){ + "correct redirect"; + } else { + "bad redirect " # debug_show(remote_data); + } + + }; + case(_){ + "wrong result" + } + } + }; + case(#err(err)){ + + "wrong content " # debug_show(get_hidden_chunks); + + }; + + }, M.equals(T.text("correct redirect"))), + S.test("can get chunks from storage", + + switch(get_storage_chunks){ + case(#ok(res)){ + switch(res){ + case(#remote(remote_data)){ "unexpected remote"}; + case(#chunk(res)){ + if(Blob.equal(res.content, Blob.fromArray(Conversion.valueToBytes(#Text("hidden hello world"))))){ + "hidden hello world"; + }else { + "somthing unexpected" + }; + }; + }; + }; + case(#err(err)){ + + "wrong content " # debug_show(get_gatway_chunks); + + }; + + }, M.equals(T.text("hidden hello world"))) */ + + ]); + + S.run(suite); + + return #success; + }; + + + public shared func testAllocation() : async {#success; #fail : Text} { + D.print("running testAllocation"); + + + let newPrincipal = await g_canister_factory.create({ + owner = Principal.fromActor(this); + storage_space = ?4096000; + }); + //D.print("have new principal " # debug_show(newPrincipal)); + + let canister : Types.Service = actor(Principal.toText(newPrincipal)); + + //D.print("have new principal " # debug_show(newPrincipal)); + + let newPrincipal_b = await g_canister_factory.create({ + owner = Principal.fromActor(this); + storage_space = ?4096000; 
+ }); + D.print("have new principal " # debug_show(newPrincipal_b)); + + let canister_b : Types.Service = actor(Principal.toText(newPrincipal_b)); + + let storage_b = await StorageCanisterDef.Storage_Canister({ + gateway_canister = Principal.fromActor(canister_b); + network = null; + storage_space = ?4096000; + }); + + D.print("have new storage " # debug_show(newPrincipal)); + + + D.print("calling storage stuff allocations"); + + let initialCanisterSpace = await canister.storage_info_nft_origyn(); + + let standardStage = await utils.buildStandardNFT("1", canister, Principal.fromActor(canister), 2048000, false); + + D.print("standardStage" # debug_show(standardStage)); + + + + let new_storage_request = await canister_b.manage_storage_nft_origyn(#add_storage_canisters([ + (Principal.fromActor(storage_b), 4096000, (0,0,1)) + ])); + + D.print("new_storage_request" # debug_show(new_storage_request)); + + D.print("staging b"); + let standardStage_b = await utils.buildStandardNFT("1", canister_b, Principal.fromActor(canister_b), 2048000, false); + D.print("DONE staging b " # debug_show(standardStage_b)); + let currentStateToken = await canister.nft_origyn("1"); + + let currentStateCanister = await canister.collection_nft_origyn(null); + + let currentStateCanister_b = await canister_b.collection_nft_origyn(null); + + let storage_metrics_canister_b_after_stage = await storage_b.storage_info_nft_origyn(); + + let gateway_metrics_canister_b_after_stage = await canister_b.storage_info_nft_origyn(); + + let get_gatway_chunks = await canister_b.chunk_nft_origyn({ + token_id = "1"; + library_id = "page"; + chunk = ?0; + }); + + + let get_hidden_chunks = await canister_b.chunk_nft_origyn({ + token_id = "1"; + library_id = "hidden"; + chunk = ?0; + }); + + let storage_actor : Types.StorageService = actor(Principal.toText(Principal.fromActor(storage_b))); + let get_storage_chunks = await storage_actor.chunk_nft_origyn({ + token_id = "1"; + library_id = "hidden"; + chunk = ?0; + }); + + let suite = S.suite("test allocation for NFT", [ + + S.test("available space on canister should be", switch(initialCanisterSpace){case( + #ok(res)){ + Nat.toText(res.available_space)}; + case(#err(err)){ + "error " # debug_show(err); + }}, M.equals(T.text("4096000"))), + S.test("staging with non enough space should fail", + + switch(standardStage.0){ + case(#ok(res)){ + "unexpected success"; + }; + case(#err(err)){ + if(err.number == 1001){ + "expected error"; + } else { + "wrong error " # debug_show(standardStage); + }; + }; + + }, M.equals(T.text("expected error"))), + S.test("can provide more storage to canister", switch(new_storage_request){ + case(#ok(res)){ + //D.print("found blind market response"); + //D.print(debug_show(res)); + switch(res){ + case(#add_storage_canisters(val)){ + if(val.0 == 8192000 and val.1 == 8192000){ + "space matches" + } else { + "bad size " # debug_show(new_storage_request); + }; + }; + /* case(_){ + "bad response " # debug_show(new_storage_request); + } */ + }; + + }; + case(#err(err)){"unexpected error: " # err.flag_point # debug_show(err)};} + , M.equals(T.text("space matches"))), //MKT0007, MKT0014 + + S.test("allocated space should be 8192000", switch(currentStateCanister_b){case( + #ok(res)){ + Nat.toText(switch(res.allocated_storage){case(null){0};case(?val){val}})}; + case(#err(err)){ + "error " # debug_show(err); + }}, M.equals(T.text("8192000"))), //NFT-225 + S.test("staging with enough space should pass", + + switch(standardStage_b.0){ + case(#ok(res)){ + "expected success"; + }; + 
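                    // NOTE: the failure diagnostics in this suite mostly print standardStage (canister A); the result actually under test here is standardStage_b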
case(#err(err)){ + + "wrong error " # debug_show(standardStage); + + }; + + }, M.equals(T.text("expected success"))), + S.test("staging library should put first two on gateway", + + switch(standardStage_b.1, standardStage_b.2){ + case(#ok(res), #ok(res2)){ + if((res == Principal.fromActor(canister_b)) and (res2 == Principal.fromActor(canister_b))){ + "expected success"; + } else { + "wrong principals " # debug_show(standardStage_b); + }; + }; + case(_,_){ + "wrong error " # debug_show(standardStage); + }; + + }, M.equals(T.text("expected success"))), + S.test("staging library should put third on storage", + + + switch(standardStage_b.3){ + case(#ok(res)){ + //D.print("testing what should have worked " # debug_show(standardStage_b)); + if(res == Principal.fromActor(storage_b)){ + "expected success"; + } else { + "wrong pricnipal"; + }; + }; + case(#err(err)){ + //D.print("testing what should have worked " # debug_show(standardStage_b)); + + "wrong error " # debug_show(standardStage); + + }; + + }, M.equals(T.text("expected success"))), + + S.test("available space on sorage should be 2048000", + + switch(storage_metrics_canister_b_after_stage){ + case(#ok(res)){ + if(res.allocated_storage == 4096000 and + res.available_space == 2048000){ + "expected success"; + }else { + "wrong space " # debug_show(res); + }; + + }; + case(#err(err)){ + + "wrong error " # debug_show(standardStage); + + }; + + }, M.equals(T.text("expected success"))), + S.test("available space on gateway should be 0", + + switch(gateway_metrics_canister_b_after_stage){ + case(#ok(res)){ + if(res.allocated_storage == 4096000 and + res.available_space == 0){ + "expected success"; + } else { + "wrong space " # debug_show(res); + }; + }; + case(#err(err)){ + + "wrong error " # debug_show(standardStage); + + }; + + }, M.equals(T.text("expected success"))), + S.test("collection info should have correct info", + + switch(currentStateCanister_b){ + case(#ok(res)){ + switch(res.allocated_storage, res.available_space){ + case(?res1, ?res2){ + if(res1 == 8192000 and + res2 == 2048000){ + "expected success"; + } else { + "nope " + }; + }; + case(_,_){ + "strange null"; + }; + }; + }; + case(#err(err)){ + + "wrong error " # debug_show(standardStage); + + }; + + }, M.equals(T.text("expected success"))), + + S.test("can get chunks from gateway", + + switch(get_gatway_chunks){ + case(#ok(res)){ + switch(res){ + case(#remote(redirect)){"unexpected remote"}; + case(#chunk(res)){ + if(Blob.equal(res.content, Blob.fromArray(Conversion.valueToBytes(#Text("hello world"))))){ + "hello world"; + } else { + "wrong content"; + }; + }; + }; + }; + case(#err(err)){ + + "wrong content " # debug_show(get_gatway_chunks); + + }; + + }, M.equals(T.text("hello world"))), + S.test("do get pointer for storage", + + switch(get_hidden_chunks){ + case(#ok(res)){ + switch(res){ + case(#remote(remote_data)){ + if(remote_data.canister == Principal.fromActor(storage_b) and + remote_data.args.library_id == "hidden" and + remote_data.args.token_id == "1" and + (switch(remote_data.args.chunk){case(?val){val};case(null){9999}}) == 0){ + "correct redirect"; + } else { + "bad redirect " # debug_show(remote_data); + } + + }; + case(_){ + "wrong result" + } + } + }; + case(#err(err)){ + + "wrong content " # debug_show(get_hidden_chunks); + + }; + + }, M.equals(T.text("correct redirect"))), + S.test("can get chunks from storage", + + switch(get_storage_chunks){ + case(#ok(res)){ + switch(res){ + case(#remote(remote_data)){ "unexpected remote"}; + case(#chunk(res)){ + 
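                            // the library chunk was staged via Conversion.valueToBytes(#Text(...)), so the returned content is compared against the same encoding of "hello world"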
if(Blob.equal(res.content, Blob.fromArray(Conversion.valueToBytes(#Text("hidden hello world"))))){ + "hidden hello world"; + }else { + "somthing unexpected" + }; + }; + }; + }; + case(#err(err)){ + + "wrong content " # debug_show(get_gatway_chunks); + + }; + + }, M.equals(T.text("hidden hello world"))) + + ]); + + S.run(suite); + + return #success; + }; + + + public shared func testCollectionLibrary() : async {#success; #fail : Text} { + //D.print("running testMarketTransfer"); + + let newPrincipal = await g_canister_factory.create({ + owner = Principal.fromActor(this); + storage_space = ?4096000; + }); + + let canister : Types.Service = actor(Principal.toText(newPrincipal)); + + + + let newPrincipal_b = await g_canister_factory.create({ + owner = Principal.fromActor(this); + storage_space = ?4096000; + }); + + let canister_b : Types.Service = actor(Principal.toText(newPrincipal_b)); + + let storage_b = await StorageCanisterDef.Storage_Canister({ + gateway_canister = Principal.fromActor(canister_b); + network = null; + storage_space = ?4096000; + }); + + + + //D.print("calling stage"); + + let initialCanisterSpace = await canister.storage_info_nft_origyn(); + + let standardStage = await utils.buildStandardNFT("", canister, Principal.fromActor(canister), 2048000, false); + + + + let new_storage_request = await canister_b.manage_storage_nft_origyn(#add_storage_canisters([ + (Principal.fromActor(storage_b), 4096000, (0,0,1)) + ])); + + //D.print("staging b"); + let standardStage_b = await utils.buildStandardNFT("1", canister_b, Principal.fromActor(canister_b), 2048000, false); + //D.print("DONE staging b " # debug_show(standardStage_b)); + + let standardStage_b_collection = await utils.buildCollection( canister_b, Principal.fromActor(canister_b), Principal.fromActor(canister_b), Principal.fromActor(canister_b), 2048000); + //D.print("DONE staging b " # debug_show(standardStage_b)); + + let mint_attempt = await canister_b.mint_nft_origyn("1", #principal(Principal.fromActor(this))); + + let currentStateToken = await canister.nft_origyn(""); + + let currentStateCanister = await canister.collection_nft_origyn(null); + + let currentStateCanister_b = await canister_b.collection_nft_origyn(null); + + let storage_metrics_canister_b_after_stage = await storage_b.storage_info_nft_origyn(); + + let gateway_metrics_canister_b_after_stage = await canister_b.storage_info_nft_origyn(); + + let get_gatway_chunks = await canister_b.chunk_nft_origyn({ + token_id = "1"; + library_id = "page"; + chunk = ?0; + }); + + + let get_hidden_chunks = await canister_b.chunk_nft_origyn({ + token_id = "1"; + library_id = "hidden"; + chunk = ?0; + }); + + let get_gatway_chunks_collection = await canister_b.chunk_nft_origyn({ + token_id = "1"; + library_id = "collection_banner"; + chunk = ?0; + }); + + let storage_actor : Types.StorageService = actor(Principal.toText(Principal.fromActor(storage_b))); + let get_storage_chunks = await storage_actor.chunk_nft_origyn({ + token_id = "1"; + library_id = "hidden"; + chunk = ?0; + }); + + + let get_storage_chunks_banner = await storage_actor.chunk_nft_origyn({ + token_id = ""; + library_id = "collection_banner"; + chunk = ?0; + }); + + let suite = S.suite("test collection allocation", [ + + S.test("available space on canister should be collection", switch(initialCanisterSpace){case( + #ok(res)){ + Nat.toText(res.available_space)}; + case(#err(err)){ + "error " # debug_show(err); + }}, M.equals(T.text("4096000"))), + S.test("staging with non enough space should fail collection", + + 
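            // canister A has only 4_096_000 allocated and no storage canisters attached, so staging libraries that declare 2_048_000 bytes each is expected to fail with error 1001 (the not-enough-space error checked below)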
+            switch(standardStage.0){
+                case(#ok(res)){
+                    "unexpected success";
+                };
+                case(#err(err)){
+                    if(err.number == 1001){
+                        "expected error";
+                    } else {
+                        "wrong error " # debug_show(standardStage);
+                    };
+                };
+
+            }, M.equals(T.text("expected error"))),
+            S.test("can provide more storage to canister collection", switch(new_storage_request){
+                case(#ok(res)){
+                    //D.print("found blind market response");
+                    //D.print(debug_show(res));
+                    switch(res){
+                        case(#add_storage_canisters(val)){
+                            if(val.0 == 8192000 and val.1 == 8192000){
+                                "space matches"
+                            } else {
+                                "bad size " # debug_show(new_storage_request);
+                            };
+                        };
+                        /* case(_){
+                            "bad response " # debug_show(new_storage_request);
+                        } */
+                    };
+
+                };
+                case(#err(err)){"unexpected error: " # err.flag_point # debug_show(err)};}
+                , M.equals(T.text("space matches"))), //MKT0007, MKT0014
+
+            S.test("allocated space should be 8192000 collection", switch(currentStateCanister_b){case(
+                #ok(res)){
+                    Nat.toText(switch(res.allocated_storage){case(null){0};case(?val){val}})};
+                case(#err(err)){
+                    "error " # debug_show(err);
+                }}, M.equals(T.text("8192000"))), //NFT-225
+            S.test("staging with enough space should pass collection",
+
+                switch(standardStage_b.0){
+                    case(#ok(res)){
+                        "expected success";
+                    };
+                    case(#err(err)){
+
+                        "wrong error " # debug_show(err);
+
+                    };
+
+                }, M.equals(T.text("expected success"))),
+            S.test("staging library should put first two on gateway collection",
+
+                switch(standardStage_b.1, standardStage_b.2){
+                    case(#ok(res), #ok(res2)){
+                        if((res == Principal.fromActor(canister_b)) and (res2 == Principal.fromActor(canister_b))){
+                            "expected success";
+                        } else {
+                            "wrong principals " # debug_show(standardStage_b);
+                        };
+                    };
+                    case(_,_){
+                        "wrong error " # debug_show(standardStage_b);
+                    };
+
+                }, M.equals(T.text("expected success"))),
+            S.test("staging library should put third on storage collection",
+
+                switch(standardStage_b.3){
+                    case(#ok(res)){
+                        //D.print("testing what should have worked " # debug_show(standardStage_b));
+                        if(res == Principal.fromActor(storage_b)){
+                            "expected success";
+                        } else {
+                            "wrong principal";
+                        };
+                    };
+                    case(#err(err)){
+                        //D.print("testing what should have worked " # debug_show(standardStage_b));
+
+                        "wrong error " # debug_show(err);
+
+                    };
+
+                }, M.equals(T.text("expected success"))),
+
+            S.test("available space on storage should be 0 collection",
+
+                // the collection_banner library staged in buildCollection consumes the remaining 2_048_000, so storage_b should be full
+                switch(storage_metrics_canister_b_after_stage){
+                    case(#ok(res)){
+                        if(res.allocated_storage == 4096000 and
+                            res.available_space == 0){
+                            "expected success";
+                        } else {
+                            "wrong space " # debug_show(res);
+                        };
+
+                    };
+                    case(#err(err)){
+
+                        "wrong error " # debug_show(err);
+
+                    };
+
+                }, M.equals(T.text("expected success"))),
+            S.test("available space on gateway should be 0 collection",
+
+                switch(gateway_metrics_canister_b_after_stage){
+                    case(#ok(res)){
+                        if(res.allocated_storage == 4096000 and
+                            res.available_space == 0){
+                            "expected success";
+                        } else {
+                            "wrong space " # debug_show(res);
+                        };
+                    };
+                    case(#err(err)){
+
+                        "wrong error " # debug_show(err);
+
+                    };
+
+                }, M.equals(T.text("expected success"))),
+            S.test("collection info should have correct info collection",
+
+                switch(currentStateCanister_b){
+                    case(#ok(res)){
+                        switch(res.allocated_storage, res.available_space){
+                            case(?res1, ?res2){
+                                if(res1 == 8192000 and
+                                    res2 == 0){
+                                    "expected success";
+                                } else {
+                                    "nope "
+                                };
+                            };
+                            case(_,_){
+                                "strange null";
+                            };
+                        };
+                    };
+                    case(#err(err)){
+
+                        "wrong error " # debug_show(err);
+
+                    };
+
+                },
M.equals(T.text("expected success"))), + + S.test("can get chunks from gateway collection", + + switch(get_gatway_chunks){ + case(#ok(res)){ + switch(res){ + case(#remote(redirect)){"unexpected remote"}; + case(#chunk(res)){ + if(Blob.equal(res.content, Blob.fromArray(Conversion.valueToBytes(#Text("hello world"))))){ + "hello world"; + } else { + "wrong content"; + }; + }; + }; + }; + case(#err(err)){ + + "wrong content " # debug_show(get_gatway_chunks); + + }; + + }, M.equals(T.text("hello world"))), + S.test("can get chunks from gateway for collection collection", + + + switch(get_gatway_chunks_collection){ + case(#ok(res)){ + switch(res){ + case(#remote(remote_data)){ + if(remote_data.canister == Principal.fromActor(storage_b) and + remote_data.args.library_id == "collection_banner" and + remote_data.args.token_id == "" and + (switch(remote_data.args.chunk){case(?val){val};case(null){9999}}) == 0){ + "correct redirect"; + } else { + "bad redirect " # debug_show(remote_data); + } + + }; + case(_){ + "wrong result" + } + } + }; + case(#err(err)){ + + "wrong content " # debug_show(get_hidden_chunks); + + }; + + }, M.equals(T.text("correct redirect"))), + + + + S.test("do get pointer for storage collection", + + switch(get_hidden_chunks){ + case(#ok(res)){ + switch(res){ + case(#remote(remote_data)){ + if(remote_data.canister == Principal.fromActor(storage_b) and + remote_data.args.library_id == "hidden" and + remote_data.args.token_id == "1" and + (switch(remote_data.args.chunk){case(?val){val};case(null){9999}}) == 0){ + "correct redirect"; + } else { + "bad redirect " # debug_show(remote_data); + } + + }; + case(_){ + "wrong result" + } + } + }; + case(#err(err)){ + + "wrong content " # debug_show(get_hidden_chunks); + + }; + + }, M.equals(T.text("correct redirect"))), + S.test("can get chunks from storage collection", + + switch(get_storage_chunks){ + case(#ok(res)){ + switch(res){ + case(#remote(remote_data)){ "unexpected remote"}; + case(#chunk(res)){ + if(Blob.equal(res.content, Blob.fromArray(Conversion.valueToBytes(#Text("hidden hello world"))))){ + "hidden hello world"; + }else { + "somthing unexpected" + }; + }; + }; + }; + case(#err(err)){ + + "wrong content " # debug_show(get_gatway_chunks); + + }; + + }, M.equals(T.text("hidden hello world"))), + S.test("can get chunks from collection collection", + + switch(get_storage_chunks_banner){ + case(#ok(res)){ + //D.print("the res from storage canister " #debug_show(res)); + switch(res){ + case(#remote(remote_data)){ "unexpected remote"}; + case(#chunk(res)){ + Conversion.bytesToText(Blob.toArray(res.content)); + }; + }; + }; + case(#err(err)){ + + "wrong content " # debug_show(get_gatway_chunks); + + }; + + }, M.equals(T.text("collection banner"))), + + + ]); + + + S.run(suite); + + return #success; + }; + + + +} \ No newline at end of file diff --git a/src/tests/test_runner_utils.mo b/src/tests/test_runner_utils.mo new file mode 100644 index 0000000..b88c808 --- /dev/null +++ b/src/tests/test_runner_utils.mo @@ -0,0 +1,89 @@ + +import AccountIdentifier "mo:principalmo/AccountIdentifier"; +import Array "mo:base/Array"; +import C "mo:matchers/Canister"; +//import CandyType "mo:candy_0_1_10/types"; +import CandyTypes "mo:candy_0_1_10/types"; +import D "mo:base/Debug"; +import Iter "mo:base/Iter"; +import M "mo:matchers/Matchers"; +import NFTUtils "../origyn_nft_reference/utils"; +import Nat64 "mo:base/Nat64"; +import Option "mo:base/Option"; +import Principal "mo:base/Principal"; +import Result "mo:base/Result"; +import S 
"mo:matchers/Suite"; +import T "mo:matchers/Testable"; +import Time "mo:base/Time"; +import Types "../origyn_nft_reference/types"; + + + +shared (deployer) actor class test_runner(dfx_ledger: Principal, dfx_ledger2: Principal) = this { + let it = C.Tester({ batchSize = 8 }); + + + private var DAY_LENGTH = 60 * 60 * 24 * 10 ** 9; + private var dip20_fee = 200_000; + + private func get_time() : Int{ + return Time.now(); + }; + + private type canister_factory_actor = actor { + create : ({owner: Principal; storage_space: ?Nat}) -> async Principal; + }; + private type storage_factory_actor = actor { + create : ({owner: Principal; storage_space: ?Nat}) -> async Principal; + }; + + private var g_canister_factory : canister_factory_actor = actor(Principal.toText(Principal.fromBlob("\04"))); + private var g_storage_factory: storage_factory_actor = actor(Principal.toText(Principal.fromBlob("\04"))); + + + + public shared func test(canister_factory : Principal, storage_factory: Principal) : async {#success; #fail : Text} { + g_canister_factory := actor(Principal.toText(canister_factory)); + g_storage_factory := actor(Principal.toText(storage_factory)); + + let suite = S.suite("test nft", [ + S.test("testNFTUtils", switch(await testNFTUtils()){case(#success){true};case(_){false};}, M.equals(T.bool(true))), + ]); + S.run(suite); + + return #success; + }; + + + public shared func testNFTUtils() : async {#success; #fail : Text} { + //D.print("running testNFTUtils"); + + let theNat = NFTUtils.get_token_id_as_nat("1"); + //D.print(debug_show(theNat)); + let theText = NFTUtils.get_nat_as_token_id(theNat); + //D.print("the text should be back"); + //D.print(theText); + + + let theNat2 = NFTUtils.get_token_id_as_nat("com.origyn.nft.SomethingFunky"); + D.print(debug_show(theNat2)); + let theText2 = NFTUtils.get_nat_as_token_id(theNat2); + D.print("the text should be back"); + D.print(theText2); + + + + //test balances + //D.print("made it here"); + let suite = S.suite("test market Nft", [ + S.test("id is converted", theText, M.equals(T.text("1"))), + S.test("id is converted 2", theText2, M.equals(T.text("com.origyn.nft.SomethingFunky"))), + ]); + //D.print("about to run"); + S.run(suite); + //D.print("returning"); + return #success; + }; + + +} \ No newline at end of file diff --git a/src/tests/test_utils.mo b/src/tests/test_utils.mo new file mode 100644 index 0000000..3f25c6c --- /dev/null +++ b/src/tests/test_utils.mo @@ -0,0 +1,406 @@ +/* + + +import Iter "mo:base/Iter"; +import Option "mo:base/Option"; + +import Text "mo:base/Text"; +import Properties "mo:candy_0_1_10/properties"; +import Workspace "mo:candy_0_1_10/workspace"; +import TrieMap "mo:base/TrieMap"; + +import Buffer "mo:base/Buffer"; +import Time "mo:base/Time"; + */ + + import NFTCanisterDef "../origyn_nft_reference/main"; + import D "mo:base/Debug"; + import Result "mo:base/Result"; + import Types "../origyn_nft_reference/types"; + import CandyTypes "mo:candy_0_1_10/types"; + import Principal "mo:base/Principal"; + +import Conversion "mo:candy_0_1_10/conversion"; + + + +module { + + public func buildStandardNFT(token_id: Text, canister: Types.Service, app: Principal, file_size: Nat, is_soulbound: Bool) : async ( + Result.Result, + Result.Result, + Result.Result, + Result.Result) { + //D.print("calling stage in build standard"); + + let stage = await canister.stage_nft_origyn(standardNFT(token_id, Principal.fromActor(canister), app, file_size, is_soulbound)); + //D.print(debug_show(stage)); + //D.print("finished stage in build standard"); + + 
let fileStage = await canister.stage_library_nft_origyn(standardFileChunk(token_id,"page","hello world")); + //D.print("finished filestage1 in build standard"); + //D.print(debug_show(fileStage)); + let previewStage = await canister.stage_library_nft_origyn(standardFileChunk(token_id,"preview","preview hello world")); + //D.print("finished filestage2 in build standard"); + //D.print(debug_show(previewStage)); + let hiddenStage = await canister.stage_library_nft_origyn(standardFileChunk(token_id,"hidden","hidden hello world")); + //D.print("finished filestage3 in build standard"); + //D.print(debug_show(hiddenStage)); + + return (stage, switch(fileStage){case(#ok(val)){#ok(val.canister)};case(#err(err)){#err(err)};}, switch(previewStage){case(#ok(val)){#ok(val.canister)};case(#err(err)){#err(err)};}, switch(hiddenStage){case(#ok(val)){#ok(val.canister)};case(#err(err)){#err(err)};}); + }; + + + public func buildCollection(canister: Types.Service, app: Principal, node: Principal, originator: Principal, file_size: Nat) : async ( + Result.Result, + Result.Result) { + //D.print("calling stage in build standard"); + + let stage = await canister.stage_nft_origyn(standardCollection(Principal.fromActor(canister), app, node, originator, file_size)); + //D.print(debug_show(stage)); + //D.print("finished stage in build standard"); + + let fileStage = await canister.stage_library_nft_origyn(standardFileChunk("","collection_banner","collection banner")); + + + return (stage, switch(fileStage){case(#ok(val)){#ok(val.canister)};case(#err(err)){#err(err)};}); + }; + + public func standardNFT( + token_id: Text, + canister : Principal, + app: Principal, + file_size: Nat, + is_soulbound: Bool) : {metadata : CandyTypes.CandyValue} { + {metadata = #Class([ + {name = "id"; value=#Text(token_id); immutable= true}, + {name = "primary_asset"; value=#Text("page"); immutable= true}, + {name = "preview"; value=#Text("page"); immutable= true}, + {name = "experience"; value=#Text("page"); immutable= true}, + {name = "library"; value=#Array(#thawed([ + #Class([ + {name = "library_id"; value=#Text("page"); immutable= true}, + {name = "title"; value=#Text("page"); immutable= true}, + {name = "location_type"; value=#Text("canister"); immutable= true},// ipfs, arweave, portal + {name = "location"; value=#Text("http://localhost:8000/-/1/-/page?canisterId=" # Principal.toText(canister)); immutable= true}, + {name = "content_type"; value=#Text("text/html; charset=UTF-8"); immutable= true}, + {name = "content_hash"; value=#Bytes(#frozen([0,0,0,0])); immutable= true}, + {name = "size"; value=#Nat(file_size); immutable= true}, + {name = "sort"; value=#Nat(0); immutable= true}, + {name = "read"; value=#Text("public"); immutable=false;}, + ]), + #Class([ + {name = "library_id"; value=#Text("preview"); immutable= true}, + {name = "title"; value=#Text("preview"); immutable= true}, + {name = "location_type"; value=#Text("canister"); immutable= true}, + {name = "location"; value=#Text("http://localhost:8000/-/1/-/preview?canisterId=" # Principal.toText(canister)); immutable= true}, + {name = "content_type"; value=#Text("text/html; charset=UTF-8"); immutable= true}, + {name = "content_hash"; value=#Bytes(#frozen([0,0,0,0])); immutable= true}, + {name = "size"; value=#Nat(file_size); immutable= true}, + {name = "sort"; value=#Nat(0); immutable= true}, + {name = "read"; value=#Text("public"); immutable=false;}, + ]), + #Class([ + {name = "library_id"; value=#Text("hidden"); immutable= true}, + {name = "title"; value=#Text("hidden"); 
immutable= true}, + {name = "location_type"; value=#Text("canister"); immutable= true}, + {name = "location"; value=#Text("http://localhost:8000/-/1/-/hidden?canisterId=" # Principal.toText(canister) ); immutable= true}, + {name = "content_type"; value=#Text("text/html; charset=UTF-8"); immutable= true}, + {name = "content_hash"; value=#Bytes(#frozen([0,0,0,0])); immutable= true}, + {name = "size"; value=#Nat(file_size); immutable= true}, + {name = "sort"; value=#Nat(0); immutable= true}, + {name = "read"; value=#Text("public");immutable=false;}, + ]), + #Class([ + {name = "library_id"; value=#Text("collection_banner"); immutable= true}, + {name = "title"; value=#Text("collection_banner"); immutable= true}, + {name = "location_type"; value=#Text("collection"); immutable= true}, + {name = "location"; value=#Text("http://localhost:8000/-/1/-/collection_banner?canisterId=" # Principal.toText(canister)); immutable= true}, + {name = "content_type"; value=#Text("text/html; charset=UTF-8"); immutable= true}, + {name = "content_hash"; value=#Bytes(#frozen([0,0,0,0])); immutable= true}, + {name = "size"; value=#Nat(file_size); immutable= true}, + {name = "sort"; value=#Nat(0); immutable= true}, + {name = "read"; value=#Text("public");immutable=false;}, + ]) + ])); immutable= false}, + {name="__apps"; value=#Array(#thawed([ + #Class([ + {name = Types.metadata.__apps_app_id; value=#Text("com.test.__public"); immutable= true}, + {name = "read"; value=#Text("public"); + immutable=false;}, + {name = "write"; value=#Class([ + {name = "type"; value=#Text("allow"); immutable= false}, + {name = "list"; value=#Array(#thawed([#Principal(app)])); + immutable=false;}]); + immutable=false;}, + {name = "permissions"; value=#Class([ + {name = "type"; value=#Text("allow"); immutable= false}, + {name = "list"; value=#Array(#thawed([#Principal(app)])); + immutable=false;}]); + immutable=false;}, + {name = "data"; value=#Class([ + {name = "val1"; value=#Text("val1"); immutable= false}, + {name = "val2"; value=#Text("val2"); immutable= false}, + {name = "val3"; value=#Class([ + {name = "data"; value=#Text("val3"); immutable= false}, + {name = "read"; value=#Text("public"); + immutable=false;}, + {name = "write"; value=#Class([ + {name = "type"; value=#Text("allow"); immutable= false}, + {name = "list"; value=#Array(#thawed([#Principal(app)])); + immutable=false;}]); + immutable=false;}]); + immutable=false;}, + {name = "val4"; value=#Class([ + {name = "data"; value=#Text("val4"); immutable= false}, + {name = "read"; value=#Class([ + {name = "type"; value=#Text("allow"); immutable= false}, + {name = "list"; value=#Array(#thawed([#Principal(app)])); + immutable=false;}]); + immutable=false;}, + {name = "write"; value=#Class([ + {name = "type"; value=#Text("allow"); immutable= false}, + {name = "list"; value=#Array(#thawed([#Principal(app)])); + immutable=false;}]); + immutable=false;}]); + immutable=false;}]); + immutable=false;} + ]), + #Class([ + {name = Types.metadata.__apps_app_id; value=#Text("com.test.__private"); immutable= true}, + {name = "read"; value=#Class([ + {name = "type"; value=#Text("allow"); immutable= false}, + {name = "list"; value=#Array(#thawed([#Principal(app)])); + immutable=false;}]); + immutable=false;}, + {name = "write"; value=#Class([ + {name = "type"; value=#Text("allow"); immutable= false}, + {name = "list"; value=#Array(#thawed([#Principal(app)])); + immutable=false;}]); + immutable=false;}, + {name = "permissions"; value=#Class([ + {name = "type"; value=#Text("allow"); immutable= false}, 
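+                    // an "allow" node restricts the permission to the principals in the companion "list" value; read nodes marked #Text("public") carry no list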
+ {name = "list"; value=#Array(#thawed([#Principal(app)])); + immutable=false;}]); + immutable=false;}, + {name = "data"; value=#Class([ + {name = "val1"; value=#Text("val1"); immutable= false}, + {name = "val2"; value=#Text("val2"); immutable= false}, + {name = "val3"; value=#Class([ + {name = "data"; value=#Text("val3"); immutable= false}, + {name = "read"; value=#Text("public"); + immutable=false;}, + {name = "write"; value=#Class([ + {name = "type"; value=#Text("allow"); immutable= false}, + {name = "list"; value=#Array(#thawed([#Principal(app)])); + immutable=false;}]); + immutable=false;}]); + immutable=false;}, + {name = "val4"; value=#Class([ + {name = "data"; value=#Text("val4"); immutable= false}, + {name = "read"; value=#Class([ + {name = "type"; value=#Text("allow"); immutable= false}, + {name = "list"; value=#Array(#thawed([#Principal(app)])); + immutable=false;}]); + immutable=false;}, + {name = "write"; value=#Class([ + {name = "type"; value=#Text("allow"); immutable= false}, + {name = "list"; value=#Array(#thawed([#Principal(app)])); + immutable=false;}]); + immutable=false;}]); + immutable=false;}]); + immutable=false;} + ]) + ] + )); + immutable=false;}, + {name = "primary_host"; value=#Text("localhost"); immutable= false}, + {name = "primary_port"; value=#Text("8000"); immutable= false}, + {name = "primary_protcol"; value=#Text("http"); immutable= false}, + + {name = "owner"; value=#Principal(canister); immutable= false}, + {name = "is_soulbound"; value=#Bool(is_soulbound); immutable = is_soulbound}, + ])} + }; + + + public func standardCollection( + canister : Principal, + app: Principal, + node: Principal, + originator: Principal, + file_size: Nat, + ) : {metadata : CandyTypes.CandyValue} { + {metadata = #Class([ + {name = "id"; value=#Text(""); immutable= true}, + {name = "primary_asset"; value=#Text("collection_banner"); immutable= true}, + {name = "preview"; value=#Text("collection_banner"); immutable= true}, + {name = "experience"; value=#Text("collection_banner"); immutable= true}, + {name = "com.origyn.node"; value=#Principal(node); immutable= true}, + {name = "com.origyn.originator"; value=#Principal(originator); immutable= true}, + {name = "com.origyn.royalties.primary.default"; value=#Array(#frozen([ + #Class([ + {name = "tag"; value=#Text("com.origyn.royalty.broker"); immutable= true}, + {name = "rate"; value=#Float(0.06); immutable= true} + ]), + #Class([ + {name = "tag"; value=#Text("com.origyn.royalty.node"); immutable= true}, + {name = "rate"; value=#Float(0.07777); immutable= true} + ]), + #Class([ + {name = "tag"; value=#Text("com.origyn.royalty.network"); immutable= true}, + {name = "rate"; value=#Float(0.005); immutable= true} + ]), + + ])); immutable= false}, + {name = "com.origyn.royalties.secondary.default"; value=#Array(#frozen([ + #Class([ + {name = "tag"; value=#Text("com.origyn.royalty.broker"); immutable= true}, + {name = "rate"; value=#Float(0.01); immutable= true} + ]), + #Class([ + {name = "tag"; value=#Text("com.origyn.royalty.node"); immutable= true}, + {name = "rate"; value=#Float(0.02); immutable= true} + ]), + #Class([ + {name = "tag"; value=#Text("com.origyn.royalty.originator"); immutable= true}, + {name = "rate"; value=#Float(0.03333333333); immutable= true} + ]), + #Class([ + {name = "tag"; value=#Text("com.origyn.royalty.custom"); immutable= true}, + {name = "rate"; value=#Float(0.04); immutable= true} + ]), + #Class([ + {name = "tag"; value=#Text("com.origyn.royalty.broker"); immutable= true}, + {name = "rate"; 
value=#Float(0.04); immutable= true} + ]), + #Class([ + {name = "tag"; value=#Text("com.origyn.royalty.network"); immutable= true}, + {name = "rate"; value=#Float(0.005); immutable= true} + ]), + ])); immutable= false}, + {name = "library"; value=#Array(#thawed([ + #Class([ + {name = "library_id"; value=#Text("collection_banner"); immutable= true}, + {name = "title"; value=#Text("collection_banner"); immutable= true}, + {name = "location_type"; value=#Text("canister"); immutable= true},// ipfs, arweave, portal + {name = "location"; value=#Text("https://" # Principal.toText(canister) # ".raw.ic0.app/collection/-/collection_banner"); immutable= true}, + {name = "content_type"; value=#Text("text/html; charset=UTF-8"); immutable= true}, + {name = "content_hash"; value=#Bytes(#frozen([0,0,0,0])); immutable= true}, + {name = "size"; value=#Nat(file_size); immutable= true}, + {name = "sort"; value=#Nat(0); immutable= true}, + {name = "read"; value=#Text("public"); immutable=false;}, + ]) + ])); immutable= false}, + {name="__apps"; value=#Array(#thawed([ + #Class([ + {name = Types.metadata.__apps_app_id; value=#Text("com.test.__public"); immutable= true}, + {name = "read"; value=#Text("public"); + immutable=false;}, + {name = "write"; value=#Class([ + {name = "type"; value=#Text("allow"); immutable= false}, + {name = "list"; value=#Array(#thawed([#Principal(app)])); + immutable=false;}]); + immutable=false;}, + {name = "permissions"; value=#Class([ + {name = "type"; value=#Text("allow"); immutable= false}, + {name = "list"; value=#Array(#thawed([#Principal(app)])); + immutable=false;}]); + immutable=false;}, + {name = "data"; value=#Class([ + {name = "val1"; value=#Text("val1"); immutable= false}, + {name = "val2"; value=#Text("val2"); immutable= false}, + {name = "val3"; value=#Class([ + {name = "data"; value=#Text("val3"); immutable= false}, + {name = "read"; value=#Text("public"); + immutable=false;}, + {name = "write"; value=#Class([ + {name = "type"; value=#Text("allow"); immutable= false}, + {name = "list"; value=#Array(#thawed([#Principal(app)])); + immutable=false;}]); + immutable=false;}]); + immutable=false;}, + {name = "val4"; value=#Class([ + {name = "data"; value=#Text("val4"); immutable= false}, + {name = "read"; value=#Class([ + {name = "type"; value=#Text("allow"); immutable= false}, + {name = "list"; value=#Array(#thawed([#Principal(app)])); + immutable=false;}]); + immutable=false;}, + {name = "write"; value=#Class([ + {name = "type"; value=#Text("allow"); immutable= false}, + {name = "list"; value=#Array(#thawed([#Principal(app)])); + immutable=false;}]); + immutable=false;}]); + immutable=false;}]); + immutable=false;} + ]), + #Class([ + {name = Types.metadata.__apps_app_id; value=#Text("com.test.__private"); immutable= true}, + {name = "read"; value=#Class([ + {name = "type"; value=#Text("allow"); immutable= false}, + {name = "list"; value=#Array(#thawed([#Principal(app)])); + immutable=false;}]); + immutable=false;}, + {name = "write"; value=#Class([ + {name = "type"; value=#Text("allow"); immutable= false}, + {name = "list"; value=#Array(#thawed([#Principal(app)])); + immutable=false;}]); + immutable=false;}, + {name = "permissions"; value=#Class([ + {name = "type"; value=#Text("allow"); immutable= false}, + {name = "list"; value=#Array(#thawed([#Principal(app)])); + immutable=false;}]); + immutable=false;}, + {name = "data"; value=#Class([ + {name = "val1"; value=#Text("val1"); immutable= false}, + {name = "val2"; value=#Text("val2"); immutable= false}, + {name = "val3"; 
value=#Class([ + {name = "data"; value=#Text("val3"); immutable= false}, + {name = "read"; value=#Text("public"); + immutable=false;}, + {name = "write"; value=#Class([ + {name = "type"; value=#Text("allow"); immutable= false}, + {name = "list"; value=#Array(#thawed([#Principal(app)])); + immutable=false;}]); + immutable=false;}]); + immutable=false;}, + {name = "val4"; value=#Class([ + {name = "data"; value=#Text("val4"); immutable= false}, + {name = "read"; value=#Class([ + {name = "type"; value=#Text("allow"); immutable= false}, + {name = "list"; value=#Array(#thawed([#Principal(app)])); + immutable=false;}]); + immutable=false;}, + {name = "write"; value=#Class([ + {name = "type"; value=#Text("allow"); immutable= false}, + {name = "list"; value=#Array(#thawed([#Principal(app)])); + immutable=false;}]); + immutable=false;}]); + immutable=false;}]); + immutable=false;} + ]) + ] + )); + immutable=false;}, + {name = "owner"; value=#Principal(canister); immutable= false}, + {name = "is_soulbound"; value=#Bool(false); immutable = false}, + {name = "primary_host"; value=#Text("localhost"); immutable= false}, + {name = "primary_port"; value=#Text("8000"); immutable= false}, + {name = "primary_protcol"; value=#Text("http"); immutable= false}, + {name = "owner"; value=#Principal(canister); immutable= false} + ])} + }; + + public func standardFileChunk(token_id: Text, library_id: Text, text: Text) : Types.StageChunkArg{ + { + token_id = token_id : Text; + library_id = library_id : Text; + filedata = #Empty; + chunk = 0; + content = Conversion.valueToBlob(#Text(text));// content = #Bytes(nat8array); + } + }; + + +} \ No newline at end of file diff --git a/src/tests/test_wallet.mo b/src/tests/test_wallet.mo new file mode 100644 index 0000000..5ee8f3a --- /dev/null +++ b/src/tests/test_wallet.mo @@ -0,0 +1,870 @@ + + +import CandyType "mo:candy_0_1_10/types"; +import AccountIdentifier "mo:principalmo/AccountIdentifier"; + +import D "mo:base/Debug"; +import Principal "mo:base/Principal"; +import Result "mo:base/Result"; +import Blob "mo:base/Blob"; +import Time "mo:base/Time"; +import Text "mo:base/Text"; +import Int "mo:base/Int"; +import Nat64 "mo:base/Nat64"; +import Nat32 "mo:base/Nat32"; +import Types "../origyn_nft_reference/types"; +import SaleTypes "../origyn_sale_reference/types"; +import DFXTypes "../origyn_nft_reference/dfxtypes"; + +shared (deployer) actor class test_wallet() = this { + + let debug_channel= { + throws = true; + deposit_info = true; + }; + + public type Operation = { + #mint; + #burn; + #transfer; + #transferFrom; + #approve; + }; + public type TransactionStatus = { + #succeeded; + #inprogress; + #failed; + }; + + public type TxReceipt = { + #Ok: Nat; + #Err: { + #InsufficientAllowance; + #InsufficientBalance; + #ErrorOperationStyle; + #Unauthorized; + #LedgerTrap; + #ErrorTo; + #Other: Text; + #BlockUsed; + #AmountTooSmall; + }; + }; + + public type TxRecord = { + caller: ?Principal; + op: Operation; + index: Nat; + from: Principal; + to: Principal; + amount: Nat; + fee: Nat; + timestamp: Time.Time; + status: TransactionStatus; + }; + + public type AccountBalanceArgs = { + account: Blob; + }; + + public type Tokens = { + e8s : Nat64; + }; + + public type ledgerService = actor { + account_balance : query (AccountBalanceArgs) -> async Tokens; + transfer : (to: Principal, value: Nat) -> async TxReceipt; + getTransaction : (id: Nat) -> async TxRecord; + approve : (spender: Principal, value: Nat) -> async TxReceipt; + transferFrom: (from: Principal, to: Principal, value: Nat) 
-> async TxReceipt; + }; + + public shared func try_get_chunk(canister: Principal, token_id: Text, library_id: Text, chunk: Nat) : async Result.Result { + + let acanister : Types.Service = actor(Principal.toText(canister)); + switch(await acanister.chunk_nft_origyn({token_id = token_id; library_id = library_id; chunk = ?chunk;})){ + case(#ok(result)){ + switch(result){ + case(#remote(redirect)){return #err("found remote item")}; + case(#chunk(result))return #ok(result.content); + }; + + + }; + case(#err(theerror)){ + return #err("An error occured: " # debug_show(theerror)); + }; + }; + + }; + + + public shared func try_get_nft(canister: Principal, token_id: Text) : async Result.Result { + + let acanister : Types.Service = actor(Principal.toText(canister)); + switch(await acanister.nft_origyn(token_id)){ + case(#ok(result)){ + //D.print("Retrieved an nft from a wallet"); + //D.print(debug_show(result)); + return #ok(result); + + }; + case(#err(theerror)){ + return #err(theerror); + }; + }; + + }; + + + public shared func try_publish_meta(canister: Principal) : async Result.Result { + + let acanister : Types.Service = actor(Principal.toText(canister)); + let stage = await acanister.stage_nft_origyn({metadata = #Class([ + {name = "id"; value=#Text("1"); immutable= true}, + {name = "primary_asset"; value=#Text("page"); immutable= true}, + {name = "preview"; value=#Text("page"); immutable= true}, + {name = "experience"; value=#Text("page"); immutable= true}, + {name = "library"; value=#Array(#thawed([ + #Class([ + {name = "id"; value=#Text("page"); immutable= true}, + {name = "title"; value=#Text("page"); immutable= true}, + {name = "location_type"; value=#Text("canister"); immutable= true}, + {name = "location"; value=#Text("https://" # Principal.toText(Principal.fromActor(acanister)) # ".raw.ic0.app/_/1/_/page"); immutable= true}, + {name = "content_type"; value=#Text("text/html; charset=UTF-8"); immutable= true}, + {name = "content_hash"; value=#Bytes(#frozen([0,0,0,0])); immutable= true}, + {name = "size"; value=#Nat(4); immutable= true}, + {name = "sort"; value=#Nat(0); immutable= true}, + ]) + ])); immutable= true}, + {name = "owner"; value=#Principal(Principal.fromActor(acanister)); immutable= false} + ])}); + + switch(stage){ + case(#ok(result)){ + return #ok(result); + }; + case(#err(theerror)){ + return #err(theerror); + }; + }; + + }; + + public shared func try_publish_chunk(canister: Principal) : async Result.Result { + + let acanister : Types.Service = actor(Principal.toText(canister)); + let fileStage = await acanister.stage_library_nft_origyn({ + token_id = "1" : Text; + library_id = "page" : Text; + filedata = #Empty; + chunk = 0; + content = Blob.fromArray([104, 101, 108, 108, 111, 32, 119, 111, 114, 108, 100]); + }); + + switch(fileStage){ + case(#ok(result)){ + return #ok(result); + }; + case(#err(theerror)){ + return #err(theerror); + }; + }; + + }; + + + + public shared func try_get_bearer(canister: Principal) : async Result.Result { + + let acanister : Types.Service = actor(Principal.toText(canister)); + let fileStage = await acanister.bearer_nft_origyn("1"); + + switch(fileStage){ + case(#ok(result)){ + return #ok(result); + }; + case(#err(theerror)){ + return #err(theerror); + }; + }; + + }; + + public shared func try_mint(canister: Principal) : async Result.Result { + + let acanister : Types.Service = actor(Principal.toText(canister)); + let mint = await acanister.mint_nft_origyn("1", #principal(Principal.fromActor(this))); + + switch(mint){ + case(#ok(result)){ + 
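                // mint succeeded; per the request above, ownership of token "1" goes to this test wallet canister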
return #ok(result); + }; + case(#err(theerror)){ + return #err(theerror); + }; + }; + + }; + + public shared func try_sale_staged(current_owner: Principal, canister: Principal, ledger: Principal) : async Result.Result { + + let acanister : Types.Service = actor(Principal.toText(canister)); + //D.print("caling market transfer origyn"); + let trysale = await acanister.market_transfer_nft_origyn({ + token_id = "1"; + sales_config = + { + escrow_receipt = ?{ + seller = #principal(current_owner); + buyer = #principal(Principal.fromActor(this)); + token_id = "1"; + + token = #ic({ + canister = ledger; + standard = #Ledger; + decimals = 8; + symbol = "LDG"; + fee = 200000; + }); + amount = 100_000_000; + }; + pricing = #instant; + broker_id = null; + }; + + }); + + //D.print(debug_show(trysale)); + + switch(trysale){ + case(#ok(result)){ + return #ok(result); + }; + case(#err(theerror)){ + return #err(theerror); + }; + }; + + }; + + public shared func try_escrow_withdraw( + canister: Principal, + buyer: Principal, + ledger: Principal, + seller: Principal, + token_id: Text, + amount: Nat, + token: ?Types.TokenSpec) : async Result.Result { + + let acanister : Types.Service = actor(Principal.toText(canister)); + //D.print("escrow withdraw"); + let result = await acanister.sale_nft_origyn( + #withdraw(#escrow({ + withdraw_to = #principal(Principal.fromActor(this)); + token_id= token_id; + token = switch(token){ + case(null){ + #ic({ + canister = ledger; + standard = #Ledger; + decimals = 8; + symbol = "LDG"; + fee = 200000; + }); + }; + case(?val){val}; + }; + seller = #principal(seller); + buyer = #principal(buyer); + amount = amount; + }))); + switch(result){ + case(#ok(result)){ + switch(result){ + case(#withdraw(result)){ + return #ok(result); + }; + case(_){ + D.print("this should not have happened"); + + return #err(Types.errors(#nyi, "this should not have happened", null)); + } + } + }; + case(#err(err)){ + return #err(err); + }; + } + + }; + + + public shared func try_escrow_reject( + canister: Principal, + buyer: Principal, + ledger: Principal, + seller: Principal, + token_id: Text, + token: ?Types.TokenSpec) : async Result.Result { + + let acanister : Types.Service = actor(Principal.toText(canister)); + //D.print("escrow withdraw"); + let result = await acanister.sale_nft_origyn( + #withdraw(#reject({ + + token_id= token_id; + token = switch(token){ + case(null){ + #ic({ + canister = ledger; + standard = #Ledger; + decimals = 8; + symbol = "LDG"; + fee = 200000; + }); + }; + case(?val){val}; + }; + seller = #principal(seller); + buyer = #principal(buyer); + }))); + switch(result){ + case(#ok(result)){ + switch(result){ + case(#withdraw(result)){ + return #ok(result); + }; + case(_){ + D.print("this should not have happened"); + + return #err(Types.errors(#nyi, "this should not have happened", null)); + } + } + }; + case(#err(err)){ + return #err(err); + }; + } + + }; + + public shared func try_sale_withdraw(canister: Principal, buyer: Principal, ledger: Principal, seller: Principal, token_id: Text, amount: Nat, token: ?Types.TokenSpec) : async Result.Result { + + let acanister : Types.Service = actor(Principal.toText(canister)); + D.print("sale withdraw"); + let tryescrow = await acanister.sale_nft_origyn(#withdraw( + #sale({ + withdraw_to = #principal(Principal.fromActor(this)); + token_id= token_id; + token = switch(token){ + case(null){ + #ic({ + canister = ledger; + standard = #Ledger; + decimals = 8; + symbol = "LDG"; + fee = 200000; + }); + }; + case(?val){val}; + }; + seller = 
#principal(seller); + buyer = #principal(buyer); + amount = amount; + }))); + + switch(tryescrow){ + case(#ok(result)){ + switch(result){ + case(#withdraw(result)){ + return #ok(result); + }; + case(_){ + return #err(Types.errors(#nyi,"test", null)); + } + + }; + }; + case(#err(theerror)){ + return #err(theerror); + }; + + }; + + }; + + + public shared func try_deposit_refund( + canister: Principal, + ledger: Principal, + amount : Nat, + token: ?Types.TokenSpec) : async Result.Result { + + let acanister : Types.Service = actor(Principal.toText(canister)); + D.print("deposit refund origyn"); + let trywithdraw = await acanister.sale_nft_origyn(#withdraw(#deposit({ + token = switch(token){ + case(null){ + #ic({ + canister = ledger; + standard = #Ledger; + decimals = 8; + symbol = "LDG"; + fee = 200000; + }); + }; + case(?val){val}; + }; + + buyer = #principal(Principal.fromActor(this)); + amount = amount; + withdraw_to = #principal(Principal.fromActor(this)); + }))); + + D.print("trywithdraw" # debug_show(trywithdraw)); + + switch(trywithdraw){ + case(#ok(#withdraw(result))){ + return #ok(result); + }; + case(#err(theerror)){ + return #err(theerror); + }; + case(_){ + return #err(Types.errors(#improper_interface, "should not be here", null)); + } + }; + + }; + + + public shared func try_escrow_specific_staged( + current_owner: Principal, + canister: Principal, + ledger: Principal, + block: ?Nat, + amount : Nat, + token_id : Text, + sale_id: ?Text, + token: ?Types.TokenSpec, + lock: ?Int) : async Result.Result { + + let acanister : Types.Service = actor(Principal.toText(canister)); + //D.print("escrow origyn"); + let tryescrow = await acanister.sale_nft_origyn(#escrow_deposit({ + token_id = token_id; + deposit = { + token = switch(token){ + case(null){ + #ic({ + canister = ledger; + standard = #Ledger; + decimals = 8; + symbol = "LDG"; + fee = 200000; + }); + }; + case(?val){val}; + }; + seller = #principal(current_owner); + buyer = #principal(Principal.fromActor(this)); + amount = amount; + sale_id = sale_id; + trx_id = switch(block){ + case(null){null}; + case(?block){?#nat(block)}; + }; + }; + lock_to_date = lock; + })); + + switch(tryescrow){ + case(#ok(#escrow_deposit((result)))){ + return #ok(result); + }; + case(#err(theerror)){ + return #err(theerror); + }; + case(_){ + return #err(Types.errors(#improper_interface, "should not be here", null)); + } + }; + + }; + + public shared func try_escrow_general_staged( + current_owner: Principal, + canister: Principal, + ledger: Principal, + block: ?Nat, + amount: Nat, + token: ?Types.TokenSpec, + lock: ?Int) : async Result.Result { + + let acanister : Types.Service = actor(Principal.toText(canister)); + D.print("trying escrow" # debug_show(#escrow_deposit({ + token_id = ""; + deposit = { + token = switch(token){ + case(null){ + #ic({ + canister = ledger; + standard = #Ledger; + decimals = 8; + symbol = "LDG"; + fee = 200000; + }); + }; + case(?val){val}; + }; + seller = #principal(current_owner); + buyer = #principal(Principal.fromActor(this)); + amount = amount; + sale_id = null; + trx_id = switch(block){ + case(null){null}; + case(?val){?#nat(val)}; + }; + }; + lock_to_date = lock; + }))); + + let tryescrow = await acanister.sale_nft_origyn(#escrow_deposit({ + token_id = ""; + deposit = { + token = switch(token){ + case(null){ + #ic({ + canister = ledger; + standard = #Ledger; + decimals = 8; + symbol = "LDG"; + fee = 200000; + }); + }; + case(?val){val}; + }; + seller = #principal(current_owner); + buyer = 
#principal(Principal.fromActor(this)); + amount = amount; + sale_id = null; + trx_id = switch(block){ + case(null){null}; + case(?val){?#nat(val)}; + }; + }; + lock_to_date = lock; + })); + //D.print("result for escrow was"); + //D.print(debug_show(tryescrow)); + + switch(tryescrow){ + case(#ok(result)){ + D.print("have result" # debug_show(result)); + let #escrow_deposit(aResult) = result; + return #ok(aResult); + }; + case(#err(theerror)){ + return #err(theerror); + }; + }; + + }; + + + public shared(msg) func send_ledger_payment(ledger: Principal, amount: Nat, to: Principal) : async Result.Result { + + let dfx : DFXTypes.Service = actor(Principal.toText(ledger)); + + let canister : Types.Service = actor(Principal.toText(to)); + + debug{if(debug_channel.throws == true){ D.print("checking deposit info in send_ledger_payment for " # debug_show(Principal.fromActor(this)))}}; + + let #ok(#deposit_info(deposit_info)) = await canister.sale_info_nft_origyn(#deposit_info(?#principal(Principal.fromActor(this)))); + + + + debug{if(debug_channel.deposit_info == true){ D.print("Have deposit info: " # debug_show(deposit_info))}}; + + let funding_result = await dfx.transfer({ + to = deposit_info.account_id; + fee = {e8s = 200_000 : Nat64}; + memo = Nat64.fromNat(Nat32.toNat(Text.hash(Principal.toText(to) # Principal.toText(msg.caller)))); + from_subaccount = null; + created_at_time = ?{timestamp_nanos = Nat64.fromNat(Int.abs(Time.now()))}; + amount = {e8s = Nat64.fromNat(amount)};}); + + debug{if(debug_channel.deposit_info == true){ D.print("Have funding result: " # debug_show(funding_result))}}; + + + switch(funding_result){ + case(#Ok(result)){ + D.print("an ok result" # debug_show(result)); + return #ok(result); + }; + case(#Err(theerror)){ + D.print("an error" # debug_show(theerror)); + return #err(theerror); + }; + }; + }; + + public shared(msg) func send_payment(ledger: Principal, amount: Nat, to: Principal) : async Result.Result { + + let aledger : ledgerService = actor(Principal.toText(ledger)); + + //D.print("calling transfer"); + let trypayment = await aledger.transfer(to, amount); + + switch(trypayment){ + case(#Ok(result)){ + return #ok(result); + }; + case(#Err(theerror)){ + return #err(Types.errors( #nyi, debug_show(theerror), ?msg.caller)); + }; + }; + }; + + public shared(msg) func ledger_balance(ledger: Principal, wallet: Principal) : async Tokens { + + let aledger : ledgerService = actor(Principal.toText(ledger)); + + //D.print("calling transfer"); + let trybalance = await aledger.account_balance({account = Blob.fromArray(AccountIdentifier.addHash(AccountIdentifier.fromPrincipal(wallet, null))) }); + + return trybalance; + }; + + public shared(msg) func approve_payment(ledger: Principal, amount: Nat, to: Principal) : async Result.Result { + + let aledger : ledgerService = actor(Principal.toText(ledger)); + + //D.print("calling transfer"); + let trypayment = await aledger.approve(to, amount); + + switch(trypayment){ + case(#Ok(result)){ + return #ok(result); + }; + case(#Err(theerror)){ + return #err(Types.errors( #nyi, debug_show(theerror), ?msg.caller)); + }; + }; + }; + + public shared(msg) func try_owner_transfer(canister: Principal, token_id: Text, to: Types.Account) : async Result.Result { + + let acanister : Types.Service = actor(Principal.toText(canister)); + + //D.print("calling transfer"); + let try_transfer = await acanister.share_wallet_nft_origyn({from = #principal(Principal.fromActor(this)); to = to; token_id = token_id}); + + switch(try_transfer){ + case(#ok(result)){ + 
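                // share_wallet_nft_origyn completed the owner-level transfer of token_id to the target account without going through a market sale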
return #ok(result); + }; + case(#err(theerror)){ + return #err(theerror); + }; + }; + + }; + + public shared(msg) func try_offer_refresh(canister: Principal) : async Result.Result { + + let acanister : Types.Service = actor(Principal.toText(canister)); + + //D.print("calling transfer"); + let try_refresh = await acanister.sale_nft_origyn(#refresh_offers(null)); + + switch(try_refresh){ + case(#ok(result)){ + return #ok(result); + }; + case(#err(theerror)){ + return #err(theerror); + }; + }; + + }; + + + public shared(msg) func try_set_nft(canister: Principal, token_id: Text, data: CandyType.CandyValue) : async Result.Result { + + let acanister : Types.Service = actor(Principal.toText(canister)); + + //D.print("calling set data"); + let try_transfer = await acanister.update_app_nft_origyn(#replace{token_id = token_id; data = data}); + + switch(try_transfer){ + case(#ok(result)){ + return #ok(result); + }; + case(#err(theerror)){ + return #err(theerror); + }; + }; + + }; + + public shared(msg) func try_start_auction(canister: Principal, ledger: Principal, token_id: Text, allow_list : ?[Principal]) : async Result.Result { + + let acanister : Types.Service = actor(Principal.toText(canister)); + + //D.print("calling set data"); + let trystart = await acanister.market_transfer_nft_origyn({token_id = "1"; + sales_config = { + escrow_receipt = null; + broker_id = null; + pricing = #auction{ + reserve = ?(100 * 10 ** 8); + token = #ic({ + canister = ledger; + standard = #Ledger; + decimals = 8; + symbol = "LDG"; + fee = 200000; + }); + buy_now = ?(500 * 10 ** 8); + start_price = (1 * 10 ** 8); + start_date = 0; + ending = #date(1); + min_increase = #amount(10*10**8); + allow_list = allow_list + }; + }; } ); + + + switch(trystart){ + case(#ok(result)){ + return #ok(result); + }; + case(#err(theerror)){ + return #err(theerror); + }; + }; + + }; + + public shared(msg) func try_bid(canister: Principal, owner: Principal, ledger: Principal, amount: Nat, token_id: Text, sale_id: Text, broker: ?Principal) : async Result.Result { + + let acanister : Types.Service = actor(Principal.toText(canister)); + + //D.print("calling set data"); + let trystart = await acanister.sale_nft_origyn(#bid({ + broker_id = broker; + sale_id = sale_id; + escrow_receipt = { + seller= #principal(owner); + buyer= #principal(Principal.fromActor(this)); + token_id = token_id; + token = #ic({ + canister = ledger; + standard = #Ledger; + decimals = 8; + symbol = "LDG"; + fee = 200000; + }); + amount = amount}})); + + + + switch(trystart){ + case(#ok(result)){ + switch(result){ + case(#bid(result)){ + #ok(result); + }; + case(_){ + return #err(Types.errors(#unreachable,"shouldnt be here", ?msg.caller)); + }; + }; + + }; + case(#err(theerror)){ + return #err(theerror); + }; + }; + + }; + + public shared(msg) func try_sale_manage_nft(canister: Principal, items: [SaleTypes.ManageNFTRequest]) : async Result.Result{ + let asale : SaleTypes.Service = actor(Principal.toText(canister)); + + //D.print("calling set data"); + let trymanage = await asale.manage_nfts_sale_nft_origyn(items); + + + switch(trymanage){ + case(#ok(result)){ + return #ok(result); + }; + case(#err(theerror)){ + return #err(theerror); + }; + }; + }; + + public shared(msg) func try_sale_nft_allocation(canister: Principal, item: SaleTypes.AllocationRequest) : async Result.Result{ + let asale : SaleTypes.Service = actor(Principal.toText(canister)); + + //D.print("calling allocate data"); + let trymanage = await asale.allocate_sale_nft_origyn(item); + + + switch(trymanage){ + 
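            // forward the allocation result (or the sale canister's error) straight back to the caller/test runner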
case(#ok(result)){ + return #ok(result); + }; + case(#err(theerror)){ + return #err(theerror); + }; + }; + }; + + + public shared(msg) func try_sale_nft_redeem(canister: Principal, item: SaleTypes.RedeemAllocationRequest) : async Result.Result{ + let asale : SaleTypes.Service = actor(Principal.toText(canister)); + + //D.print("calling set try_sale_nft_redeem"); + let trymanage = await asale.redeem_allocation_sale_nft_origyn(item); + + + switch(trymanage){ + case(#ok(result)){ + return #ok(result); + }; + case(#err(theerror)){ + return #err(theerror); + }; + }; + }; + + + public shared(msg) func try_sale_registration(canister: Principal, item: SaleTypes.RegisterEscrowRequest) : async Result.Result{ + let asale : SaleTypes.Service = actor(Principal.toText(canister)); + + D.print("calling set data"); + let trymanage = await asale.register_escrow_sale_nft_origyn(item); + + D.print("done set data" # debug_show(trymanage)); + + + switch(trymanage){ + case(#ok(result)){ + return #ok(result); + }; + case(#err(theerror)){ + return #err(theerror); + }; + }; + }; + + + + + + +}; \ No newline at end of file diff --git a/vessel.dhall b/vessel.dhall new file mode 100644 index 0000000..0635e2a --- /dev/null +++ b/vessel.dhall @@ -0,0 +1,4 @@ +{ + dependencies = [ "base", "array", "crypto", "hash", "encoding", "matchers","candy_0_1_10","principalmo","ext","httpparser","http","json","format","stablerbtree_0_6_1","stablebuffer_0_2_0","map_6_0_0","map","stablebuffer" ], + compiler = Some "0.6.22" +}