feat: index bulks #57

base: main
@@ -0,0 +1,180 @@
import {
  Address,
  BigInt,
  Bytes,
  dataSource,
  DataSourceContext,
  DataSourceTemplate,
  json,
  JSONValueKind,
} from '@graphprotocol/graph-ts';
import { Bulk, BulkSlice } from '../../generated/schema';
import {
  computeTaskId,
  CONTEXT_BOT_FIRST,
  CONTEXT_BOT_SIZE,
  CONTEXT_BULK,
  CONTEXT_DEAL,
  CONTEXT_INDEX,
  createBulkOrderID,
  createBulkSliceID,
  fetchDatasetorder,
  isAddressString,
  isBytes32String,
  isHexString,
  isIntegerString,
  toRLC,
} from '../utils';

export function handleBulk(content: Bytes): void {
  const hash = dataSource.stringParam();
  const context = dataSource.context();
Review comment on lines +30 to +31:

> How is this info loaded when a handleBulk occurs?

> So the context that comes with the content is like metadata plugged onto the actual content of the IPFS file? And is this metadata set up in the matchOrders handler function?
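For context on the mechanism being discussed: with subgraph file data sources, the context travels from the on-chain handler to the file handler roughly as below. This is a minimal sketch of the graph-ts flow under assumed names (the 'deal' key and spawnExample are hypothetical), not code from this PR:

```ts
import { Bytes, dataSource, DataSourceContext, DataSourceTemplate } from '@graphprotocol/graph-ts';

// 1. On-chain handler side: spawn a file data source for an IPFS CID and
//    attach whatever on-chain metadata the file handler will need later.
export function spawnExample(cid: string, dealId: string): void {
  let context = new DataSourceContext();
  context.setString('deal', dealId); // hypothetical key name
  DataSourceTemplate.createWithContext('Bulk', [cid], context);
}

// 2. File handler side: runs once the file is fetched from IPFS. `content`
//    is the raw file bytes; the CID and the context come from `dataSource`.
export function handleExample(content: Bytes): void {
  const cid = dataSource.stringParam(); // the CID passed at spawn time
  const dealId = dataSource.context().getString('deal'); // metadata, not file data
}
```

So yes: the context acts as metadata attached to the spawned data source, separate from the IPFS file content, and in this PR it is populated in the handleOrdersMatched handler.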
  const dealId = context.getString(CONTEXT_DEAL);
  const botFirst = context.getBigInt(CONTEXT_BOT_FIRST);
  const botSize = context.getBigInt(CONTEXT_BOT_SIZE);

  const bulkId = dealId;
  let bulk = Bulk.load(bulkId);
  if (bulk != null) {
    // immutable bulk already exists, nothing to do
    return;
  }
  bulk = new Bulk(bulkId);
  bulk.hash = hash;

  const jsonContent = json.try_fromBytes(content);
  if (jsonContent.isOk && jsonContent.value.kind == JSONValueKind.ARRAY) {
    const contentArray = jsonContent.value.toArray();

    for (let i = 0; i < contentArray.length; i++) {
      const entry = contentArray[i];
      const index = BigInt.fromI32(i);
      if (
        // exclude slices outside the deal's bot range
        index >= botFirst &&
        index < botFirst.plus(botSize) &&
        entry.kind == JSONValueKind.STRING
      ) {
        const sliceCid = entry.toString();
        let sliceContext = new DataSourceContext();
        sliceContext.setString(CONTEXT_BULK, bulkId);
        sliceContext.setString(CONTEXT_DEAL, dealId);
        sliceContext.setBigInt(CONTEXT_INDEX, index);
        DataSourceTemplate.createWithContext('BulkSlice', [sliceCid], sliceContext);
      }
    }
  }

  bulk.save();
}

export function handleBulkSlice(content: Bytes): void {
  const hash = dataSource.stringParam();
  const context = dataSource.context();
  const bulk = context.getString(CONTEXT_BULK);
  const dealId = context.getString(CONTEXT_DEAL);
  const index = context.getBigInt(CONTEXT_INDEX);
  const taskId = computeTaskId(dealId, index);

  if (taskId !== null) {
    const bulkSliceId = createBulkSliceID(dealId, index);
    let bulkSlice = BulkSlice.load(bulkSliceId);
    if (bulkSlice != null) {
      // immutable bulk slice already exists, nothing to do
      return;
    }
    bulkSlice = new BulkSlice(bulkSliceId);
    bulkSlice.task = taskId;
    bulkSlice.hash = hash;
    bulkSlice.bulk = bulk;
    bulkSlice.index = index;
    bulkSlice.datasets = new Array<string>();
    bulkSlice.datasetOrders = new Array<string>();

    const jsonContent = json.try_fromBytes(content);
    if (jsonContent.isOk && jsonContent.value.kind == JSONValueKind.ARRAY) {
      const datasetOrderArray = jsonContent.value.toArray();

      for (let i = 0; i < datasetOrderArray.length; i++) {
        const datasetOrder = datasetOrderArray[i];
        if (datasetOrder.kind == JSONValueKind.OBJECT) {
          const orderObj = datasetOrder.toObject();

          const datasetEntry = orderObj.getEntry('dataset');
          const datasetPriceEntry = orderObj.getEntry('datasetprice');
          const volumeEntry = orderObj.getEntry('volume');
          const tagEntry = orderObj.getEntry('tag');
          const apprestrictEntry = orderObj.getEntry('apprestrict');
          const workerpoolrestrictEntry = orderObj.getEntry('workerpoolrestrict');
          const requesterrestrictEntry = orderObj.getEntry('requesterrestrict');
          const saltEntry = orderObj.getEntry('salt');
          const signEntry = orderObj.getEntry('sign');
          // check that all entries are present and valid
          if (
            datasetEntry != null &&
            datasetEntry.value.kind == JSONValueKind.STRING &&
            isAddressString(datasetEntry.value.toString().toLowerCase()) &&
            datasetPriceEntry != null &&
            datasetPriceEntry.value.kind == JSONValueKind.STRING &&
            isIntegerString(datasetPriceEntry.value.toString()) &&
            volumeEntry != null &&
            volumeEntry.value.kind == JSONValueKind.STRING &&
            isIntegerString(volumeEntry.value.toString()) &&
            tagEntry != null &&
            tagEntry.value.kind == JSONValueKind.STRING &&
            isBytes32String(tagEntry.value.toString()) &&
            apprestrictEntry != null &&
            apprestrictEntry.value.kind == JSONValueKind.STRING &&
            isAddressString(apprestrictEntry.value.toString().toLowerCase()) &&
            workerpoolrestrictEntry != null &&
            workerpoolrestrictEntry.value.kind == JSONValueKind.STRING &&
            isAddressString(workerpoolrestrictEntry.value.toString().toLowerCase()) &&
            requesterrestrictEntry != null &&
            requesterrestrictEntry.value.kind == JSONValueKind.STRING &&
            isAddressString(requesterrestrictEntry.value.toString().toLowerCase()) &&
            saltEntry != null &&
            saltEntry.value.kind == JSONValueKind.STRING &&
            isBytes32String(saltEntry.value.toString()) &&
            signEntry != null &&
            signEntry.value.kind == JSONValueKind.STRING &&
            isHexString(signEntry.value.toString())
          ) {
            // datasetOrderId cannot be the orderHash, as it could collide with an on-chain indexed order
            const datasetOrderId = createBulkOrderID(taskId, BigInt.fromI32(i));

            const datasetAddress = datasetEntry.value.toString().toLowerCase();

            let datasetOrder = fetchDatasetorder(datasetOrderId);
            datasetOrder.dataset = datasetAddress;
            datasetOrder.datasetprice = toRLC(
              BigInt.fromString(datasetPriceEntry.value.toString()),
            );
            datasetOrder.volume = BigInt.fromString(volumeEntry.value.toString());
            datasetOrder.tag = Bytes.fromHexString(tagEntry.value.toString());
            datasetOrder.apprestrict = Address.fromString(
              apprestrictEntry.value.toString().toLowerCase(),
            );
            datasetOrder.workerpoolrestrict = Address.fromString(
              workerpoolrestrictEntry.value.toString().toLowerCase(),
            );
            datasetOrder.requesterrestrict = Address.fromString(
              requesterrestrictEntry.value.toString().toLowerCase(),
            );
            datasetOrder.salt = Bytes.fromHexString(saltEntry.value.toString());
            datasetOrder.sign = Bytes.fromHexString(signEntry.value.toString());
            datasetOrder.save();

            let datasetOrders = bulkSlice.datasetOrders;
            datasetOrders.push(datasetOrderId);
            bulkSlice.datasetOrders = datasetOrders;

            let datasets = bulkSlice.datasets;
            datasets.push(datasetAddress);
            bulkSlice.datasets = datasets;
          }
Review comment:

> Should we warn here in case the dataset order is invalid?
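If a warning is wanted, the graph-ts `log` module can be used from file handlers. A minimal sketch of what an else-branch of the validity check could call; the helper name is hypothetical, not part of this PR:

```ts
import { log } from '@graphprotocol/graph-ts';

// Hypothetical helper for an else-branch of the validity check above:
// record which slice entry was rejected without aborting the handler.
function warnInvalidOrder(bulkSliceId: string, entryIndex: i32): void {
  log.warning('skipping invalid dataset order {} in bulk slice {}', [
    entryIndex.toString(),
    bulkSliceId,
  ]);
}
```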
        }
      }
    }
    bulkSlice.save();
  }
}
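For orientation, the file shapes implied by the two parsers read as follows. This is an illustration inferred from the validation logic above, with made-up CIDs and zero-padded example addresses, not an example taken from the PR:

```ts
// Assumed shapes, inferred from handleBulk / handleBulkSlice above.
// A bulk file is a JSON array of slice CIDs; array index i maps to bot index i.
const exampleBulk: string = '["QmExampleSliceCid0", "QmExampleSliceCid1"]';

// A slice file is a JSON array of signed dataset orders; every field is a
// string and must pass the validity checks before entities are created.
const exampleSlice: string =
  '[{' +
  '"dataset": "0x0000000000000000000000000000000000000001",' +
  '"datasetprice": "0",' +
  '"volume": "1",' +
  '"tag": "0x0000000000000000000000000000000000000000000000000000000000000000",' +
  '"apprestrict": "0x0000000000000000000000000000000000000000",' +
  '"workerpoolrestrict": "0x0000000000000000000000000000000000000000",' +
  '"requesterrestrict": "0x0000000000000000000000000000000000000000",' +
  '"salt": "0x0000000000000000000000000000000000000000000000000000000000000001",' +
  '"sign": "0x00"' +
  '}]';
```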
@@ -1,7 +1,14 @@
// SPDX-FileCopyrightText: 2020-2025 IEXEC BLOCKCHAIN TECH <[email protected]>
// SPDX-License-Identifier: Apache-2.0

-import { Address, BigInt, dataSource } from '@graphprotocol/graph-ts';
+import {
+  Address,
+  BigInt,
+  dataSource,
+  DataSourceContext,
+  DataSourceTemplate,
+  json,
+} from '@graphprotocol/graph-ts';
const chainName = dataSource.network();

import {
@@ -34,6 +41,9 @@ import {
} from '../../generated/schema';

import {
+  CONTEXT_BOT_FIRST,
+  CONTEXT_BOT_SIZE,
+  CONTEXT_DEAL,
  createContributionID,
  createEventID,
  fetchAccount,
@@ -94,17 +104,40 @@ export function handleOrdersMatched(event: OrdersMatchedEvent): void {
  deal.timestamp = event.block.timestamp;
  deal.save();

+  // if no dataset, check if params include a bulk_cid reference
+  if (deal.dataset == Address.zero().toHex()) {
+    const params = json.try_fromString(viewedDeal.params);
+    if (params.isOk) {
+      const bulkCid = params.value.toObject().getEntry('bulk_cid');
+      if (bulkCid) {
+        // the same bulk may be used by many deals => use the dealid as bulk ID to avoid collisions
+        const bulkId = event.params.dealid.toHex();
+        let context = new DataSourceContext();
+        // pass on-chain data that will be needed in the file handlers
+        context.setString(CONTEXT_DEAL, deal.id);
+        context.setBigInt(CONTEXT_BOT_FIRST, deal.botFirst);
+        context.setBigInt(CONTEXT_BOT_SIZE, deal.botSize);
+        DataSourceTemplate.createWithContext('Bulk', [bulkCid.value.toString()], context);
+        // the bulk may not be indexed; this is not an issue, the model will prune it
+        deal.bulk = bulkId;
+        deal.save();
+      }
+    }
+  }

  const dataset = deal.dataset;

  let apporder = fetchApporder(event.params.appHash.toHex());
  apporder.app = deal.app;
  apporder.appprice = deal.appPrice;
  apporder.save();

-  let datasetorder = fetchDatasetorder(event.params.datasetHash.toHex());
-  if (dataset) datasetorder.dataset = dataset;
-  datasetorder.datasetprice = deal.datasetPrice;
-  datasetorder.save();
+  if (dataset != Address.zero().toHex()) {
+    let datasetorder = fetchDatasetorder(event.params.datasetHash.toHex());
+    if (dataset) datasetorder.dataset = dataset;
+    datasetorder.datasetprice = deal.datasetPrice;
+    datasetorder.save();
+  }
Review comment on lines +135 to +140:

> This avoids indexing a null datasetorder. Not directly linked to bulk, but it fixes an existing issue.

  let workerpoolorder = fetchWorkerpoolorder(event.params.workerpoolHash.toHex());
  workerpoolorder.workerpool = deal.workerpool;
Review comment:

> I had to modify the nullability of this field because file handlers can't access entities created by on-chain handlers. When indexing a datasetorder from a bulk (file handler), we cannot verify that the dataset exists on-chain or exists as an entity.
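In schema terms, the nullability change described above would look roughly like this. This is a hypothetical sketch: the actual entity definition lives in the project's schema.graphql, which is not part of this diff, and the surrounding fields are assumptions:

```graphql
type DatasetOrder @entity {
  id: ID!
  # previously assumed `dataset: Dataset!`; made nullable because a file
  # handler creating this order cannot verify that the Dataset entity exists
  dataset: Dataset
  datasetprice: BigDecimal!
  volume: BigInt!
}
```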