373
vendor/github.com/status-im/status-go/protocol/LICENSE
generated
vendored
Normal file
373
vendor/github.com/status-im/status-go/protocol/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,373 @@
|
||||
Mozilla Public License Version 2.0
|
||||
==================================
|
||||
|
||||
1. Definitions
|
||||
--------------
|
||||
|
||||
1.1. "Contributor"
|
||||
means each individual or legal entity that creates, contributes to
|
||||
the creation of, or owns Covered Software.
|
||||
|
||||
1.2. "Contributor Version"
|
||||
means the combination of the Contributions of others (if any) used
|
||||
by a Contributor and that particular Contributor's Contribution.
|
||||
|
||||
1.3. "Contribution"
|
||||
means Covered Software of a particular Contributor.
|
||||
|
||||
1.4. "Covered Software"
|
||||
means Source Code Form to which the initial Contributor has attached
|
||||
the notice in Exhibit A, the Executable Form of such Source Code
|
||||
Form, and Modifications of such Source Code Form, in each case
|
||||
including portions thereof.
|
||||
|
||||
1.5. "Incompatible With Secondary Licenses"
|
||||
means
|
||||
|
||||
(a) that the initial Contributor has attached the notice described
|
||||
in Exhibit B to the Covered Software; or
|
||||
|
||||
(b) that the Covered Software was made available under the terms of
|
||||
version 1.1 or earlier of the License, but not also under the
|
||||
terms of a Secondary License.
|
||||
|
||||
1.6. "Executable Form"
|
||||
means any form of the work other than Source Code Form.
|
||||
|
||||
1.7. "Larger Work"
|
||||
means a work that combines Covered Software with other material, in
|
||||
a separate file or files, that is not Covered Software.
|
||||
|
||||
1.8. "License"
|
||||
means this document.
|
||||
|
||||
1.9. "Licensable"
|
||||
means having the right to grant, to the maximum extent possible,
|
||||
whether at the time of the initial grant or subsequently, any and
|
||||
all of the rights conveyed by this License.
|
||||
|
||||
1.10. "Modifications"
|
||||
means any of the following:
|
||||
|
||||
(a) any file in Source Code Form that results from an addition to,
|
||||
deletion from, or modification of the contents of Covered
|
||||
Software; or
|
||||
|
||||
(b) any new file in Source Code Form that contains any Covered
|
||||
Software.
|
||||
|
||||
1.11. "Patent Claims" of a Contributor
|
||||
means any patent claim(s), including without limitation, method,
|
||||
process, and apparatus claims, in any patent Licensable by such
|
||||
Contributor that would be infringed, but for the grant of the
|
||||
License, by the making, using, selling, offering for sale, having
|
||||
made, import, or transfer of either its Contributions or its
|
||||
Contributor Version.
|
||||
|
||||
1.12. "Secondary License"
|
||||
means either the GNU General Public License, Version 2.0, the GNU
|
||||
Lesser General Public License, Version 2.1, the GNU Affero General
|
||||
Public License, Version 3.0, or any later versions of those
|
||||
licenses.
|
||||
|
||||
1.13. "Source Code Form"
|
||||
means the form of the work preferred for making modifications.
|
||||
|
||||
1.14. "You" (or "Your")
|
||||
means an individual or a legal entity exercising rights under this
|
||||
License. For legal entities, "You" includes any entity that
|
||||
controls, is controlled by, or is under common control with You. For
|
||||
purposes of this definition, "control" means (a) the power, direct
|
||||
or indirect, to cause the direction or management of such entity,
|
||||
whether by contract or otherwise, or (b) ownership of more than
|
||||
fifty percent (50%) of the outstanding shares or beneficial
|
||||
ownership of such entity.
|
||||
|
||||
2. License Grants and Conditions
|
||||
--------------------------------
|
||||
|
||||
2.1. Grants
|
||||
|
||||
Each Contributor hereby grants You a world-wide, royalty-free,
|
||||
non-exclusive license:
|
||||
|
||||
(a) under intellectual property rights (other than patent or trademark)
|
||||
Licensable by such Contributor to use, reproduce, make available,
|
||||
modify, display, perform, distribute, and otherwise exploit its
|
||||
Contributions, either on an unmodified basis, with Modifications, or
|
||||
as part of a Larger Work; and
|
||||
|
||||
(b) under Patent Claims of such Contributor to make, use, sell, offer
|
||||
for sale, have made, import, and otherwise transfer either its
|
||||
Contributions or its Contributor Version.
|
||||
|
||||
2.2. Effective Date
|
||||
|
||||
The licenses granted in Section 2.1 with respect to any Contribution
|
||||
become effective for each Contribution on the date the Contributor first
|
||||
distributes such Contribution.
|
||||
|
||||
2.3. Limitations on Grant Scope
|
||||
|
||||
The licenses granted in this Section 2 are the only rights granted under
|
||||
this License. No additional rights or licenses will be implied from the
|
||||
distribution or licensing of Covered Software under this License.
|
||||
Notwithstanding Section 2.1(b) above, no patent license is granted by a
|
||||
Contributor:
|
||||
|
||||
(a) for any code that a Contributor has removed from Covered Software;
|
||||
or
|
||||
|
||||
(b) for infringements caused by: (i) Your and any other third party's
|
||||
modifications of Covered Software, or (ii) the combination of its
|
||||
Contributions with other software (except as part of its Contributor
|
||||
Version); or
|
||||
|
||||
(c) under Patent Claims infringed by Covered Software in the absence of
|
||||
its Contributions.
|
||||
|
||||
This License does not grant any rights in the trademarks, service marks,
|
||||
or logos of any Contributor (except as may be necessary to comply with
|
||||
the notice requirements in Section 3.4).
|
||||
|
||||
2.4. Subsequent Licenses
|
||||
|
||||
No Contributor makes additional grants as a result of Your choice to
|
||||
distribute the Covered Software under a subsequent version of this
|
||||
License (see Section 10.2) or under the terms of a Secondary License (if
|
||||
permitted under the terms of Section 3.3).
|
||||
|
||||
2.5. Representation
|
||||
|
||||
Each Contributor represents that the Contributor believes its
|
||||
Contributions are its original creation(s) or it has sufficient rights
|
||||
to grant the rights to its Contributions conveyed by this License.
|
||||
|
||||
2.6. Fair Use
|
||||
|
||||
This License is not intended to limit any rights You have under
|
||||
applicable copyright doctrines of fair use, fair dealing, or other
|
||||
equivalents.
|
||||
|
||||
2.7. Conditions
|
||||
|
||||
Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted
|
||||
in Section 2.1.
|
||||
|
||||
3. Responsibilities
|
||||
-------------------
|
||||
|
||||
3.1. Distribution of Source Form
|
||||
|
||||
All distribution of Covered Software in Source Code Form, including any
|
||||
Modifications that You create or to which You contribute, must be under
|
||||
the terms of this License. You must inform recipients that the Source
|
||||
Code Form of the Covered Software is governed by the terms of this
|
||||
License, and how they can obtain a copy of this License. You may not
|
||||
attempt to alter or restrict the recipients' rights in the Source Code
|
||||
Form.
|
||||
|
||||
3.2. Distribution of Executable Form
|
||||
|
||||
If You distribute Covered Software in Executable Form then:
|
||||
|
||||
(a) such Covered Software must also be made available in Source Code
|
||||
Form, as described in Section 3.1, and You must inform recipients of
|
||||
the Executable Form how they can obtain a copy of such Source Code
|
||||
Form by reasonable means in a timely manner, at a charge no more
|
||||
than the cost of distribution to the recipient; and
|
||||
|
||||
(b) You may distribute such Executable Form under the terms of this
|
||||
License, or sublicense it under different terms, provided that the
|
||||
license for the Executable Form does not attempt to limit or alter
|
||||
the recipients' rights in the Source Code Form under this License.
|
||||
|
||||
3.3. Distribution of a Larger Work
|
||||
|
||||
You may create and distribute a Larger Work under terms of Your choice,
|
||||
provided that You also comply with the requirements of this License for
|
||||
the Covered Software. If the Larger Work is a combination of Covered
|
||||
Software with a work governed by one or more Secondary Licenses, and the
|
||||
Covered Software is not Incompatible With Secondary Licenses, this
|
||||
License permits You to additionally distribute such Covered Software
|
||||
under the terms of such Secondary License(s), so that the recipient of
|
||||
the Larger Work may, at their option, further distribute the Covered
|
||||
Software under the terms of either this License or such Secondary
|
||||
License(s).
|
||||
|
||||
3.4. Notices
|
||||
|
||||
You may not remove or alter the substance of any license notices
|
||||
(including copyright notices, patent notices, disclaimers of warranty,
|
||||
or limitations of liability) contained within the Source Code Form of
|
||||
the Covered Software, except that You may alter any license notices to
|
||||
the extent required to remedy known factual inaccuracies.
|
||||
|
||||
3.5. Application of Additional Terms
|
||||
|
||||
You may choose to offer, and to charge a fee for, warranty, support,
|
||||
indemnity or liability obligations to one or more recipients of Covered
|
||||
Software. However, You may do so only on Your own behalf, and not on
|
||||
behalf of any Contributor. You must make it absolutely clear that any
|
||||
such warranty, support, indemnity, or liability obligation is offered by
|
||||
You alone, and You hereby agree to indemnify every Contributor for any
|
||||
liability incurred by such Contributor as a result of warranty, support,
|
||||
indemnity or liability terms You offer. You may include additional
|
||||
disclaimers of warranty and limitations of liability specific to any
|
||||
jurisdiction.
|
||||
|
||||
4. Inability to Comply Due to Statute or Regulation
|
||||
---------------------------------------------------
|
||||
|
||||
If it is impossible for You to comply with any of the terms of this
|
||||
License with respect to some or all of the Covered Software due to
|
||||
statute, judicial order, or regulation then You must: (a) comply with
|
||||
the terms of this License to the maximum extent possible; and (b)
|
||||
describe the limitations and the code they affect. Such description must
|
||||
be placed in a text file included with all distributions of the Covered
|
||||
Software under this License. Except to the extent prohibited by statute
|
||||
or regulation, such description must be sufficiently detailed for a
|
||||
recipient of ordinary skill to be able to understand it.
|
||||
|
||||
5. Termination
|
||||
--------------
|
||||
|
||||
5.1. The rights granted under this License will terminate automatically
|
||||
if You fail to comply with any of its terms. However, if You become
|
||||
compliant, then the rights granted under this License from a particular
|
||||
Contributor are reinstated (a) provisionally, unless and until such
|
||||
Contributor explicitly and finally terminates Your grants, and (b) on an
|
||||
ongoing basis, if such Contributor fails to notify You of the
|
||||
non-compliance by some reasonable means prior to 60 days after You have
|
||||
come back into compliance. Moreover, Your grants from a particular
|
||||
Contributor are reinstated on an ongoing basis if such Contributor
|
||||
notifies You of the non-compliance by some reasonable means, this is the
|
||||
first time You have received notice of non-compliance with this License
|
||||
from such Contributor, and You become compliant prior to 30 days after
|
||||
Your receipt of the notice.
|
||||
|
||||
5.2. If You initiate litigation against any entity by asserting a patent
|
||||
infringement claim (excluding declaratory judgment actions,
|
||||
counter-claims, and cross-claims) alleging that a Contributor Version
|
||||
directly or indirectly infringes any patent, then the rights granted to
|
||||
You by any and all Contributors for the Covered Software under Section
|
||||
2.1 of this License shall terminate.
|
||||
|
||||
5.3. In the event of termination under Sections 5.1 or 5.2 above, all
|
||||
end user license agreements (excluding distributors and resellers) which
|
||||
have been validly granted by You or Your distributors under this License
|
||||
prior to termination shall survive termination.
|
||||
|
||||
************************************************************************
|
||||
* *
|
||||
* 6. Disclaimer of Warranty *
|
||||
* ------------------------- *
|
||||
* *
|
||||
* Covered Software is provided under this License on an "as is" *
|
||||
* basis, without warranty of any kind, either expressed, implied, or *
|
||||
* statutory, including, without limitation, warranties that the *
|
||||
* Covered Software is free of defects, merchantable, fit for a *
|
||||
* particular purpose or non-infringing. The entire risk as to the *
|
||||
* quality and performance of the Covered Software is with You. *
|
||||
* Should any Covered Software prove defective in any respect, You *
|
||||
* (not any Contributor) assume the cost of any necessary servicing, *
|
||||
* repair, or correction. This disclaimer of warranty constitutes an *
|
||||
* essential part of this License. No use of any Covered Software is *
|
||||
* authorized under this License except under this disclaimer. *
|
||||
* *
|
||||
************************************************************************
|
||||
|
||||
************************************************************************
|
||||
* *
|
||||
* 7. Limitation of Liability *
|
||||
* -------------------------- *
|
||||
* *
|
||||
* Under no circumstances and under no legal theory, whether tort *
|
||||
* (including negligence), contract, or otherwise, shall any *
|
||||
* Contributor, or anyone who distributes Covered Software as *
|
||||
* permitted above, be liable to You for any direct, indirect, *
|
||||
* special, incidental, or consequential damages of any character *
|
||||
* including, without limitation, damages for lost profits, loss of *
|
||||
* goodwill, work stoppage, computer failure or malfunction, or any *
|
||||
* and all other commercial damages or losses, even if such party *
|
||||
* shall have been informed of the possibility of such damages. This *
|
||||
* limitation of liability shall not apply to liability for death or *
|
||||
* personal injury resulting from such party's negligence to the *
|
||||
* extent applicable law prohibits such limitation. Some *
|
||||
* jurisdictions do not allow the exclusion or limitation of *
|
||||
* incidental or consequential damages, so this exclusion and *
|
||||
* limitation may not apply to You. *
|
||||
* *
|
||||
************************************************************************
|
||||
|
||||
8. Litigation
|
||||
-------------
|
||||
|
||||
Any litigation relating to this License may be brought only in the
|
||||
courts of a jurisdiction where the defendant maintains its principal
|
||||
place of business and such litigation shall be governed by laws of that
|
||||
jurisdiction, without reference to its conflict-of-law provisions.
|
||||
Nothing in this Section shall prevent a party's ability to bring
|
||||
cross-claims or counter-claims.
|
||||
|
||||
9. Miscellaneous
|
||||
----------------
|
||||
|
||||
This License represents the complete agreement concerning the subject
|
||||
matter hereof. If any provision of this License is held to be
|
||||
unenforceable, such provision shall be reformed only to the extent
|
||||
necessary to make it enforceable. Any law or regulation which provides
|
||||
that the language of a contract shall be construed against the drafter
|
||||
shall not be used to construe this License against a Contributor.
|
||||
|
||||
10. Versions of the License
|
||||
---------------------------
|
||||
|
||||
10.1. New Versions
|
||||
|
||||
Mozilla Foundation is the license steward. Except as provided in Section
|
||||
10.3, no one other than the license steward has the right to modify or
|
||||
publish new versions of this License. Each version will be given a
|
||||
distinguishing version number.
|
||||
|
||||
10.2. Effect of New Versions
|
||||
|
||||
You may distribute the Covered Software under the terms of the version
|
||||
of the License under which You originally received the Covered Software,
|
||||
or under the terms of any subsequent version published by the license
|
||||
steward.
|
||||
|
||||
10.3. Modified Versions
|
||||
|
||||
If you create software not governed by this License, and you want to
|
||||
create a new license for such software, you may create and use a
|
||||
modified version of this License if you rename the license and remove
|
||||
any references to the name of the license steward (except to note that
|
||||
such modified license differs from this License).
|
||||
|
||||
10.4. Distributing Source Code Form that is Incompatible With Secondary
|
||||
Licenses
|
||||
|
||||
If You choose to distribute Source Code Form that is Incompatible With
|
||||
Secondary Licenses under the terms of this version of the License, the
|
||||
notice described in Exhibit B of this License must be attached.
|
||||
|
||||
Exhibit A - Source Code Form License Notice
|
||||
-------------------------------------------
|
||||
|
||||
This Source Code Form is subject to the terms of the Mozilla Public
|
||||
License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
If it is not possible or desirable to put the notice in a particular
|
||||
file, then You may include the notice in a location (such as a LICENSE
|
||||
file in a relevant directory) where a recipient would be likely to look
|
||||
for such a notice.
|
||||
|
||||
You may add additional accurate notices of copyright ownership.
|
||||
|
||||
Exhibit B - "Incompatible With Secondary Licenses" Notice
|
||||
---------------------------------------------------------
|
||||
|
||||
This Source Code Form is "Incompatible With Secondary Licenses", as
|
||||
defined by the Mozilla Public License, v. 2.0.
|
||||
22
vendor/github.com/status-im/status-go/protocol/README.md
generated
vendored
Normal file
22
vendor/github.com/status-im/status-go/protocol/README.md
generated
vendored
Normal file
@@ -0,0 +1,22 @@
|
||||
# status-go/protocol
|
||||
|
||||
This is an implementation of the [secure transport](https://specs.status.im/spec/5) and [payloads](https://specs.status.im/spec/6) which are a part of [the Status Client specification](https://specs.status.im/spec/1).
|
||||
|
||||
This implementation uses SQLite and [SQLCipher](https://github.com/mutecomm/go-sqlcipher) for persistent storage.
|
||||
|
||||
The payloads are encoded using [protocol-buffers](https://developers.google.com/protocol-buffers).
|
||||
|
||||
## Content
|
||||
|
||||
* `messenger.go` is the main file which exports `Messenger` struct. This is a public API to interact with this implementation of the Status Chat Protocol.
|
||||
* `protobuf/` contains protobuf files implementing payloads described in [the Payloads spec](https://specs.status.im/spec/6).
|
||||
* `encryption/` implements [the Secure Transport spec](https://specs.status.im/spec/5).
|
||||
* `transport/` connects the Status Chat Protocol with a wire-protocol which in our case is either Whisper or Waku.
|
||||
* `datasync/` is an adapter for [MVDS](https://specs.vac.dev/specs/mvds.html).
|
||||
* `applicationmetadata/` is an outer layer wrapping a payload with app-specific metadata, such as a signature.
|
||||
* `identity/` implements details related to creating a three-word name and identicon.
|
||||
* `migrations/` contains implementation specific migrations for the sqlite database which is used by `Messenger` as a persistent data store.
|
||||
|
||||
## History
|
||||
|
||||
Originally this package was a dedicated repo called `status-protocol-go` and [was migrated](https://github.com/status-im/status-go/pull/1684) into `status-go`. The new `status-go/protocol` package maintained its own dependencies until [sub modules were removed](https://github.com/status-im/status-go/pull/1835/files) and the root go.mod file managed all dependencies for the entire `status-go` repo.
|
||||
142
vendor/github.com/status-im/status-go/protocol/activity_center.go
generated
vendored
Normal file
142
vendor/github.com/status-im/status-go/protocol/activity_center.go
generated
vendored
Normal file
@@ -0,0 +1,142 @@
|
||||
package protocol
|
||||
|
||||
import (
|
||||
"crypto/ecdsa"
|
||||
"errors"
|
||||
|
||||
"github.com/status-im/status-go/eth-node/types"
|
||||
"github.com/status-im/status-go/protocol/common"
|
||||
"github.com/status-im/status-go/protocol/verification"
|
||||
)
|
||||
|
||||
// The activity center is a place where we store incoming notifications before
|
||||
// they are shown to the users as new chats, in order to mitigate the impact of spam
|
||||
// on the messenger
|
||||
|
||||
// ActivityCenterType classifies an activity center notification.
type ActivityCenterType int

const (
	// ActivityCenterNotificationNoType is the zero value: no notification.
	ActivityCenterNotificationNoType ActivityCenterType = iota
	ActivityCenterNotificationTypeNewOneToOne
	ActivityCenterNotificationTypeNewPrivateGroupChat
	ActivityCenterNotificationTypeMention
	ActivityCenterNotificationTypeReply
	ActivityCenterNotificationTypeContactRequest
	ActivityCenterNotificationTypeCommunityInvitation
	ActivityCenterNotificationTypeCommunityRequest
	ActivityCenterNotificationTypeCommunityMembershipRequest
	ActivityCenterNotificationTypeCommunityKicked
	ActivityCenterNotificationTypeContactVerification
	ActivityCenterNotificationTypeContactRemoved
	ActivityCenterNotificationTypeNewKeypairAddedToPairedDevice
	ActivityCenterNotificationTypeOwnerTokenReceived
	ActivityCenterNotificationTypeOwnershipReceived
	ActivityCenterNotificationTypeOwnershipLost
	ActivityCenterNotificationTypeSetSignerFailed
	ActivityCenterNotificationTypeSetSignerDeclined
	ActivityCenterNotificationTypeShareAccounts
	ActivityCenterNotificationTypeCommunityTokenReceived
	// NOTE: values are iota-based and likely persisted/serialized
	// (Type is JSON-encoded on ActivityCenterNotification), so new
	// entries must only be appended — never reordered.
)
|
||||
|
||||
// ActivityCenterMembershipStatus tracks the progress of a (community)
// membership request attached to a notification.
type ActivityCenterMembershipStatus int

const (
	ActivityCenterMembershipStatusIdle ActivityCenterMembershipStatus = iota
	ActivityCenterMembershipStatusPending
	ActivityCenterMembershipStatusAccepted
	ActivityCenterMembershipStatusDeclined
	ActivityCenterMembershipStatusAcceptedPending
	ActivityCenterMembershipStatusDeclinedPending
	// ActivityCenterMembershipOwnershipChanged breaks the
	// ...MembershipStatus... naming pattern of its siblings; kept as-is
	// because the name is exported.
	ActivityCenterMembershipOwnershipChanged
	// NOTE: iota-based values; append only, never reorder.
)
|
||||
|
||||
// ActivityCenterQueryParamsRead filters notification queries by read status.
type ActivityCenterQueryParamsRead uint

const (
	// The first constant is typed explicitly so the group carries
	// ActivityCenterQueryParamsRead instead of being untyped ints (the
	// original declaration left the dedicated type unused here).
	// Values start at 1 so the zero value remains "unset".
	ActivityCenterQueryParamsReadRead ActivityCenterQueryParamsRead = iota + 1
	ActivityCenterQueryParamsReadUnread
	ActivityCenterQueryParamsReadAll
)
|
||||
|
||||
// ErrInvalidActivityCenterNotification is returned by
// (*ActivityCenterNotification).Valid when a required field is missing.
var ErrInvalidActivityCenterNotification = errors.New("invalid activity center notification")
|
||||
|
||||
// ActivityCenterNotification is a single activity center entry: an incoming
// event (new chat, mention, reply, contact or community request, …) held for
// the user's attention before it surfaces as a chat.
type ActivityCenterNotification struct {
	// ID uniquely identifies the notification.
	ID          types.HexBytes `json:"id"`
	ChatID      string         `json:"chatId"`
	CommunityID string         `json:"communityId"`
	// MembershipStatus tracks membership progress for
	// membership-related notification types.
	MembershipStatus ActivityCenterMembershipStatus `json:"membershipStatus"`
	Name             string                         `json:"name"`
	Author           string                         `json:"author"`
	Type             ActivityCenterType             `json:"type"`
	LastMessage      *common.Message                `json:"lastMessage"`
	Message          *common.Message                `json:"message"`
	ReplyMessage     *common.Message                `json:"replyMessage"`
	Timestamp        uint64                         `json:"timestamp"`
	// Lifecycle flags.
	Read                      bool                       `json:"read"`
	Dismissed                 bool                       `json:"dismissed"`
	Deleted                   bool                       `json:"deleted"`
	Accepted                  bool                       `json:"accepted"`
	ContactVerificationStatus verification.RequestStatus `json:"contactVerificationStatus"`
	// UpdatedAt is used for synchronization. Each update should increment
	// it; the value should represent the time when the update occurred.
	// See IncrementUpdatedAt.
	UpdatedAt     uint64            `json:"updatedAt"`
	AlbumMessages []*common.Message `json:"albumMessages"`
}
|
||||
|
||||
func (n *ActivityCenterNotification) IncrementUpdatedAt(timesource common.TimeSource) {
|
||||
tNow := timesource.GetCurrentTime()
|
||||
// If updatead at is greater or equal than time now, we bump it
|
||||
if n.UpdatedAt >= tNow {
|
||||
n.UpdatedAt++
|
||||
} else {
|
||||
n.UpdatedAt = tNow
|
||||
}
|
||||
}
|
||||
|
||||
// ActivityCenterNotificationsRequest describes a paginated notification
// query, filtered by activity type and read status.
type ActivityCenterNotificationsRequest struct {
	// Cursor is an opaque pagination token from a previous response.
	Cursor        string                        `json:"cursor"`
	Limit         uint64                        `json:"limit"`
	ActivityTypes []ActivityCenterType          `json:"activityTypes"`
	ReadType      ActivityCenterQueryParamsRead `json:"readType"`
}
|
||||
|
||||
// ActivityCenterCountRequest describes a notification-count query, filtered
// by activity type and read status (no pagination).
type ActivityCenterCountRequest struct {
	ActivityTypes []ActivityCenterType          `json:"activityTypes"`
	ReadType      ActivityCenterQueryParamsRead `json:"readType"`
}
|
||||
|
||||
// ActivityCenterPaginationResponse is one page of notifications plus the
// cursor to fetch the next page (empty cursor presumably means no more
// pages — confirm against the persistence layer).
type ActivityCenterPaginationResponse struct {
	Cursor        string                        `json:"cursor"`
	Notifications []*ActivityCenterNotification `json:"notifications"`
}
|
||||
|
||||
// ActivityCenterCountResponse maps each notification type to its count.
type ActivityCenterCountResponse = map[ActivityCenterType]uint64
|
||||
|
||||
// ActivityCenterState is the aggregate seen/updated state of the whole
// activity center (as opposed to per-notification state).
type ActivityCenterState struct {
	HasSeen   bool   `json:"hasSeen"`
	UpdatedAt uint64 `json:"updatedAt"`
}
|
||||
|
||||
func (n *ActivityCenterNotification) Valid() error {
|
||||
if len(n.ID) == 0 || n.Type == 0 || n.Timestamp == 0 {
|
||||
return ErrInvalidActivityCenterNotification
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func showMentionOrReplyActivityCenterNotification(publicKey ecdsa.PublicKey, message *common.Message, chat *Chat, responseTo *common.Message) (bool, ActivityCenterType) {
|
||||
if chat == nil || !chat.Active || (!chat.CommunityChat() && !chat.PrivateGroupChat()) || chat.Muted {
|
||||
return false, ActivityCenterNotificationNoType
|
||||
}
|
||||
|
||||
if message.Mentioned {
|
||||
return true, ActivityCenterNotificationTypeMention
|
||||
}
|
||||
|
||||
publicKeyString := common.PubkeyToHex(&publicKey)
|
||||
if responseTo != nil && responseTo.From == publicKeyString {
|
||||
return true, ActivityCenterNotificationTypeReply
|
||||
}
|
||||
|
||||
return false, ActivityCenterNotificationNoType
|
||||
}
|
||||
1383
vendor/github.com/status-im/status-go/protocol/activity_center_persistence.go
generated
vendored
Normal file
1383
vendor/github.com/status-im/status-go/protocol/activity_center_persistence.go
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
93
vendor/github.com/status-im/status-go/protocol/anonmetrics/adaptors.go
generated
vendored
Normal file
93
vendor/github.com/status-im/status-go/protocol/anonmetrics/adaptors.go
generated
vendored
Normal file
@@ -0,0 +1,93 @@
|
||||
package anonmetrics
|
||||
|
||||
import (
|
||||
"crypto/ecdsa"
|
||||
"fmt"
|
||||
|
||||
"github.com/davecgh/go-spew/spew"
|
||||
"github.com/golang/protobuf/ptypes"
|
||||
|
||||
"github.com/status-im/status-go/appmetrics"
|
||||
"github.com/status-im/status-go/eth-node/crypto"
|
||||
"github.com/status-im/status-go/eth-node/types"
|
||||
"github.com/status-im/status-go/protocol/protobuf"
|
||||
)
|
||||
|
||||
// adaptProtoToModel is an adaptor helper function to convert a protobuf.AnonymousMetric into a appmetrics.AppMetric
|
||||
func adaptProtoToModel(pbAnonMetric *protobuf.AnonymousMetric) (*appmetrics.AppMetric, error) {
|
||||
t, err := ptypes.Timestamp(pbAnonMetric.CreatedAt)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &appmetrics.AppMetric{
|
||||
MessageID: pbAnonMetric.Id,
|
||||
Event: appmetrics.AppMetricEventType(pbAnonMetric.Event),
|
||||
Value: pbAnonMetric.Value,
|
||||
AppVersion: pbAnonMetric.AppVersion,
|
||||
OS: pbAnonMetric.Os,
|
||||
SessionID: pbAnonMetric.SessionId,
|
||||
CreatedAt: t,
|
||||
}, nil
|
||||
}
|
||||
|
||||
// adaptModelToProto is an adaptor helper function to convert a appmetrics.AppMetric into a protobuf.AnonymousMetric
|
||||
func adaptModelToProto(modelAnonMetric appmetrics.AppMetric, sendID *ecdsa.PublicKey) (*protobuf.AnonymousMetric, error) {
|
||||
id := generateProtoID(modelAnonMetric, sendID)
|
||||
createdAt, err := ptypes.TimestampProto(modelAnonMetric.CreatedAt)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &protobuf.AnonymousMetric{
|
||||
Id: id,
|
||||
Event: string(modelAnonMetric.Event),
|
||||
Value: modelAnonMetric.Value,
|
||||
AppVersion: modelAnonMetric.AppVersion,
|
||||
Os: modelAnonMetric.OS,
|
||||
SessionId: modelAnonMetric.SessionID,
|
||||
CreatedAt: createdAt,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func adaptModelsToProtoBatch(modelAnonMetrics []appmetrics.AppMetric, sendID *ecdsa.PublicKey) (*protobuf.AnonymousMetricBatch, error) {
|
||||
amb := new(protobuf.AnonymousMetricBatch)
|
||||
|
||||
for _, m := range modelAnonMetrics {
|
||||
p, err := adaptModelToProto(m, sendID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
amb.Metrics = append(amb.Metrics, p)
|
||||
}
|
||||
|
||||
return amb, nil
|
||||
}
|
||||
|
||||
func adaptProtoBatchToModels(protoBatch *protobuf.AnonymousMetricBatch) ([]*appmetrics.AppMetric, error) {
|
||||
if protoBatch == nil {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
var ams []*appmetrics.AppMetric
|
||||
|
||||
for _, pm := range protoBatch.Metrics {
|
||||
m, err := adaptProtoToModel(pm)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
ams = append(ams, m)
|
||||
}
|
||||
|
||||
return ams, nil
|
||||
}
|
||||
|
||||
// NEEDED because we don't send individual metrics, we send only batches
|
||||
func generateProtoID(modelAnonMetric appmetrics.AppMetric, sendID *ecdsa.PublicKey) string {
|
||||
return types.EncodeHex(crypto.Keccak256([]byte(fmt.Sprintf(
|
||||
"%s%s",
|
||||
types.EncodeHex(crypto.FromECDSAPub(sendID)),
|
||||
spew.Sdump(modelAnonMetric)))))
|
||||
}
|
||||
231
vendor/github.com/status-im/status-go/protocol/anonmetrics/client.go
generated
vendored
Normal file
231
vendor/github.com/status-im/status-go/protocol/anonmetrics/client.go
generated
vendored
Normal file
@@ -0,0 +1,231 @@
|
||||
package anonmetrics
|
||||
|
||||
import (
|
||||
"context"
|
||||
"crypto/ecdsa"
|
||||
"errors"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/golang/protobuf/proto"
|
||||
"go.uber.org/zap"
|
||||
|
||||
"github.com/status-im/status-go/appmetrics"
|
||||
"github.com/status-im/status-go/eth-node/crypto"
|
||||
"github.com/status-im/status-go/protocol/common"
|
||||
"github.com/status-im/status-go/protocol/protobuf"
|
||||
)
|
||||
|
||||
// ActiveClientPhrase is the exact opt-in string required in
// ClientConfig.Active before the client will send any metrics; any other
// value makes sendUnprocessedMetrics a no-op.
const ActiveClientPhrase = "yes i am wanting the activation of the anon metrics client, please thank you lots thank you"
|
||||
|
||||
// ClientConfig controls whether and where the anon metrics client sends data.
type ClientConfig struct {
	ShouldSend bool
	// SendAddress is the public key of the metrics receiver.
	SendAddress *ecdsa.PublicKey
	// Active gates the client: sending only happens when it equals
	// ActiveClientPhrase exactly.
	Active string
}
|
||||
|
||||
// Client collects locally recorded anonymous metrics from the database and
// sends them in batches to the configured receiver.
type Client struct {
	Config   *ClientConfig
	DB       *appmetrics.Database
	Identity *ecdsa.PrivateKey
	Logger   *zap.Logger

	// messageSender is a message processor used to send metric batch messages.
	messageSender *common.MessageSender

	// IntervalInc yields the Fibonacci-growing interval between runs.
	IntervalInc *FibonacciIntervalIncrementer

	// mainLoopQuit signals the main loop to terminate.
	mainLoopQuit chan struct{}

	// deleteLoopQuit signals the delete loop to terminate.
	deleteLoopQuit chan struct{}

	// DBLock prevents deletion of DB items during the main loop.
	DBLock sync.Mutex
}
|
||||
|
||||
func NewClient(sender *common.MessageSender) *Client {
|
||||
return &Client{
|
||||
messageSender: sender,
|
||||
IntervalInc: &FibonacciIntervalIncrementer{
|
||||
Last: 0,
|
||||
Current: 1,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func (c *Client) sendUnprocessedMetrics() {
|
||||
if c.Config.Active != ActiveClientPhrase {
|
||||
return
|
||||
}
|
||||
|
||||
c.Logger.Debug("sendUnprocessedMetrics() triggered")
|
||||
|
||||
c.DBLock.Lock()
|
||||
defer c.DBLock.Unlock()
|
||||
|
||||
// Get all unsent metrics grouped by session id
|
||||
uam, err := c.DB.GetUnprocessedGroupedBySession()
|
||||
if err != nil {
|
||||
c.Logger.Error("failed to get unprocessed messages grouped by session", zap.Error(err))
|
||||
}
|
||||
c.Logger.Debug("unprocessed metrics from db", zap.Reflect("uam", uam))
|
||||
|
||||
for session, batch := range uam {
|
||||
c.Logger.Debug("processing uam from session", zap.String("session", session))
|
||||
|
||||
// Convert the metrics into protobuf
|
||||
amb, err := adaptModelsToProtoBatch(batch, &c.Identity.PublicKey)
|
||||
if err != nil {
|
||||
c.Logger.Error("failed to adapt models to protobuf batch", zap.Error(err))
|
||||
return
|
||||
}
|
||||
|
||||
// Generate an ephemeral key per session id
|
||||
ephemeralKey, err := crypto.GenerateKey()
|
||||
if err != nil {
|
||||
c.Logger.Error("failed to generate an ephemeral key", zap.Error(err))
|
||||
return
|
||||
}
|
||||
|
||||
// Prepare the protobuf message
|
||||
encodedMessage, err := proto.Marshal(amb)
|
||||
if err != nil {
|
||||
c.Logger.Error("failed to marshal protobuf", zap.Error(err))
|
||||
return
|
||||
}
|
||||
rawMessage := common.RawMessage{
|
||||
Payload: encodedMessage,
|
||||
Sender: ephemeralKey,
|
||||
SkipEncryptionLayer: true,
|
||||
SendOnPersonalTopic: true,
|
||||
MessageType: protobuf.ApplicationMetadataMessage_ANONYMOUS_METRIC_BATCH,
|
||||
}
|
||||
|
||||
c.Logger.Debug("rawMessage prepared from unprocessed anonymous metrics", zap.Reflect("rawMessage", rawMessage))
|
||||
|
||||
// Send the metrics batch
|
||||
_, err = c.messageSender.SendPrivate(context.Background(), c.Config.SendAddress, &rawMessage)
|
||||
if err != nil {
|
||||
c.Logger.Error("failed to send metrics batch message", zap.Error(err))
|
||||
return
|
||||
}
|
||||
|
||||
// Mark metrics as processed
|
||||
err = c.DB.SetToProcessed(batch)
|
||||
if err != nil {
|
||||
c.Logger.Error("failed to set metrics as processed in db", zap.Error(err))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (c *Client) mainLoop() error {
|
||||
if c.Config.Active != ActiveClientPhrase {
|
||||
return nil
|
||||
}
|
||||
|
||||
c.Logger.Debug("mainLoop() triggered")
|
||||
|
||||
for {
|
||||
c.sendUnprocessedMetrics()
|
||||
|
||||
waitFor := time.Duration(c.IntervalInc.Next()) * time.Second
|
||||
c.Logger.Debug("mainLoop() wait interval set", zap.Duration("waitFor", waitFor))
|
||||
select {
|
||||
case <-time.After(waitFor):
|
||||
case <-c.mainLoopQuit:
|
||||
return nil
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (c *Client) startMainLoop() {
|
||||
if c.Config.Active != ActiveClientPhrase {
|
||||
return
|
||||
}
|
||||
|
||||
c.Logger.Debug("startMainLoop() triggered")
|
||||
|
||||
c.stopMainLoop()
|
||||
c.mainLoopQuit = make(chan struct{})
|
||||
go func() {
|
||||
c.Logger.Debug("startMainLoop() anonymous go routine triggered")
|
||||
err := c.mainLoop()
|
||||
if err != nil {
|
||||
c.Logger.Error("main loop exited with an error", zap.Error(err))
|
||||
}
|
||||
}()
|
||||
}
|
||||
|
||||
func (c *Client) deleteLoop() error {
|
||||
// Sleep to give the main lock time to process any old messages
|
||||
time.Sleep(time.Second * 10)
|
||||
|
||||
for {
|
||||
func() {
|
||||
c.DBLock.Lock()
|
||||
defer c.DBLock.Unlock()
|
||||
|
||||
oneWeekAgo := time.Now().Add(time.Hour * 24 * 7 * -1)
|
||||
err := c.DB.DeleteOlderThan(&oneWeekAgo)
|
||||
if err != nil {
|
||||
c.Logger.Error("failed to delete metrics older than given time",
|
||||
zap.Time("time given", oneWeekAgo),
|
||||
zap.Error(err))
|
||||
}
|
||||
}()
|
||||
|
||||
select {
|
||||
case <-time.After(time.Hour):
|
||||
case <-c.deleteLoopQuit:
|
||||
return nil
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (c *Client) startDeleteLoop() {
|
||||
c.stopDeleteLoop()
|
||||
c.deleteLoopQuit = make(chan struct{})
|
||||
go func() {
|
||||
err := c.deleteLoop()
|
||||
if err != nil {
|
||||
c.Logger.Error("delete loop exited with an error", zap.Error(err))
|
||||
}
|
||||
}()
|
||||
}
|
||||
|
||||
func (c *Client) Start() error {
|
||||
c.Logger.Debug("Main Start() triggered")
|
||||
if c.messageSender == nil {
|
||||
return errors.New("can't start, missing message processor")
|
||||
}
|
||||
|
||||
c.startMainLoop()
|
||||
c.startDeleteLoop()
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *Client) stopMainLoop() {
|
||||
c.Logger.Debug("stopMainLoop() triggered")
|
||||
|
||||
if c.mainLoopQuit != nil {
|
||||
c.Logger.Debug("mainLoopQuit not set, attempting to close")
|
||||
|
||||
close(c.mainLoopQuit)
|
||||
c.mainLoopQuit = nil
|
||||
}
|
||||
}
|
||||
|
||||
func (c *Client) stopDeleteLoop() {
|
||||
if c.deleteLoopQuit != nil {
|
||||
close(c.deleteLoopQuit)
|
||||
c.deleteLoopQuit = nil
|
||||
}
|
||||
}
|
||||
|
||||
func (c *Client) Stop() error {
|
||||
c.stopMainLoop()
|
||||
c.stopDeleteLoop()
|
||||
return nil
|
||||
}
|
||||
15
vendor/github.com/status-im/status-go/protocol/anonmetrics/interval_incrementer.go
generated
vendored
Normal file
15
vendor/github.com/status-im/status-go/protocol/anonmetrics/interval_incrementer.go
generated
vendored
Normal file
@@ -0,0 +1,15 @@
|
||||
package anonmetrics
|
||||
|
||||
// FibonacciIntervalIncrementer produces successive Fibonacci numbers, used
// as an ever-growing back-off interval (in seconds) between metric sends.
type FibonacciIntervalIncrementer struct {
	Last    int64
	Current int64
}

// Next advances the sequence and returns its next value.
func (f *FibonacciIntervalIncrementer) Next() int64 {
	next := f.Last + f.Current
	f.Last, f.Current = f.Current, next
	return next
}
|
||||
321
vendor/github.com/status-im/status-go/protocol/anonmetrics/migrations/migrations.go
generated
vendored
Normal file
321
vendor/github.com/status-im/status-go/protocol/anonmetrics/migrations/migrations.go
generated
vendored
Normal file
@@ -0,0 +1,321 @@
|
||||
// Code generated by go-bindata. DO NOT EDIT.
|
||||
// sources:
|
||||
// 1619446565_postgres_make_anon_metrics_table.down.sql (24B)
|
||||
// 1619446565_postgres_make_anon_metrics_table.up.sql (443B)
|
||||
// doc.go (380B)
|
||||
|
||||
package migrations
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"compress/gzip"
|
||||
"crypto/sha256"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
// bindataRead gunzips the embedded asset data; name is used only to build
// error messages.
func bindataRead(data []byte, name string) ([]byte, error) {
	gz, err := gzip.NewReader(bytes.NewBuffer(data))
	if err != nil {
		return nil, fmt.Errorf("read %q: %w", name, err)
	}

	var buf bytes.Buffer
	_, err = io.Copy(&buf, gz)
	clErr := gz.Close()

	if err != nil {
		return nil, fmt.Errorf("read %q: %w", name, err)
	}
	if clErr != nil {
		// Fix: the generated code returned `err` here, which is nil at this
		// point, silently swallowing the close failure.
		return nil, clErr
	}

	return buf.Bytes(), nil
}
|
||||
|
||||
// asset pairs the decompressed bytes of an embedded file with its file
// metadata and SHA-256 digest.
type asset struct {
	bytes  []byte
	info   os.FileInfo
	digest [sha256.Size]byte
}

// bindataFileInfo implements os.FileInfo for embedded assets.
type bindataFileInfo struct {
	name    string
	size    int64
	mode    os.FileMode
	modTime time.Time
}
|
||||
|
||||
// Name returns the recorded asset file name.
func (fi bindataFileInfo) Name() string {
	return fi.name
}

// Size returns the uncompressed length in bytes.
func (fi bindataFileInfo) Size() int64 {
	return fi.size
}

// Mode returns the recorded file mode bits.
func (fi bindataFileInfo) Mode() os.FileMode {
	return fi.mode
}

// ModTime returns the recorded modification time.
func (fi bindataFileInfo) ModTime() time.Time {
	return fi.modTime
}

// IsDir always reports false: embedded assets are regular files.
func (fi bindataFileInfo) IsDir() bool {
	return false
}

// Sys returns nil; there is no underlying system data source.
func (fi bindataFileInfo) Sys() interface{} {
	return nil
}
|
||||
|
||||
var __1619446565_postgres_make_anon_metrics_tableDownSql = []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x72\x09\xf2\x0f\x50\x08\x71\x74\xf2\x71\x55\x48\x2c\x28\x88\xcf\x4d\x2d\x29\xca\x4c\x2e\xb6\xe6\x02\x04\x00\x00\xff\xff\x99\xa7\x42\x7d\x18\x00\x00\x00")
|
||||
|
||||
// _1619446565_postgres_make_anon_metrics_tableDownSqlBytes gunzips the
// embedded down-migration SQL.
func _1619446565_postgres_make_anon_metrics_tableDownSqlBytes() ([]byte, error) {
	return bindataRead(
		__1619446565_postgres_make_anon_metrics_tableDownSql,
		"1619446565_postgres_make_anon_metrics_table.down.sql",
	)
}

// _1619446565_postgres_make_anon_metrics_tableDownSql wraps the
// down-migration SQL in an *asset with its generated metadata and digest.
func _1619446565_postgres_make_anon_metrics_tableDownSql() (*asset, error) {
	bytes, err := _1619446565_postgres_make_anon_metrics_tableDownSqlBytes()
	if err != nil {
		return nil, err
	}

	info := bindataFileInfo{name: "1619446565_postgres_make_anon_metrics_table.down.sql", size: 24, mode: os.FileMode(0644), modTime: time.Unix(1704739012, 0)}
	a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x75, 0xea, 0x1, 0x74, 0xe6, 0xa3, 0x11, 0xd0, 0x86, 0x87, 0x7e, 0x31, 0xb4, 0x1a, 0x27, 0x5d, 0xda, 0x77, 0xa3, 0xf5, 0x1d, 0x88, 0x79, 0xcf, 0xd5, 0x95, 0x75, 0xd, 0x47, 0xa1, 0x90, 0x5}}
	return a, nil
}
|
||||
|
||||
var __1619446565_postgres_make_anon_metrics_tableUpSql = []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x74\x90\x4d\x6a\xc3\x30\x14\x84\xf7\x39\xc5\x2c\xdb\x10\xc8\x01\xba\x52\xdc\x17\xea\x56\xb6\x53\x59\x2e\x64\x65\x84\xfd\x30\x82\xfa\x07\x49\x75\xe9\xed\x8b\x1d\x92\x38\x81\xac\xbf\xef\x69\x46\x13\x29\x12\x9a\xa0\xc5\x4e\x12\xcc\x30\x94\x2d\x07\x67\x2b\x8f\xa7\x15\x00\xd8\x1a\x39\xa9\x58\x48\x1c\x54\x9c\x08\x75\xc4\x07\x1d\x37\xab\x99\x6d\xd7\x88\xbb\xaa\x6f\x6d\xd7\xe0\x74\x85\xda\x04\x83\xf5\x76\xc6\x2d\x7b\x6f\x1a\x2e\x6d\x8d\x2f\xa1\xa2\x37\xa1\x50\xa4\xf1\x67\x41\x48\x33\x8d\xb4\x90\x72\x33\x7b\x3c\x72\x17\x2e\xca\x2d\x1b\xcd\xf7\x0f\xe3\x3d\xcf\xd2\x3b\x30\x35\x1d\xd9\x79\xdb\x77\x0f\x4e\xfb\x81\x9d\x09\xb6\x6b\x4a\xff\xe7\x03\xb7\x0f\x34\xcf\x7e\x7a\x64\xd9\xf2\x56\xa8\x1c\x9b\xc0\x75\x69\x02\x74\x9c\x50\xae\x45\x72\x58\x28\xe7\x25\x54\xff\x3b\x8d\x60\x96\x0b\x0c\xae\xaf\xd8\x7b\xae\xb1\xcb\x32\x49\xe2\xfa\x09\xbc\xd2\x5e\x14\x52\x63\x2f\x64\x4e\xa7\x20\xc7\x15\xdb\xf1\x3e\xe9\x2c\x46\x85\x52\x94\xea\xf2\x42\x9e\x5f\xfe\x03\x00\x00\xff\xff\xee\x42\x32\x03\xbb\x01\x00\x00")
|
||||
|
||||
// _1619446565_postgres_make_anon_metrics_tableUpSqlBytes gunzips the
// embedded up-migration SQL.
func _1619446565_postgres_make_anon_metrics_tableUpSqlBytes() ([]byte, error) {
	return bindataRead(
		__1619446565_postgres_make_anon_metrics_tableUpSql,
		"1619446565_postgres_make_anon_metrics_table.up.sql",
	)
}

// _1619446565_postgres_make_anon_metrics_tableUpSql wraps the up-migration
// SQL in an *asset with its generated metadata and digest.
func _1619446565_postgres_make_anon_metrics_tableUpSql() (*asset, error) {
	bytes, err := _1619446565_postgres_make_anon_metrics_tableUpSqlBytes()
	if err != nil {
		return nil, err
	}

	info := bindataFileInfo{name: "1619446565_postgres_make_anon_metrics_table.up.sql", size: 443, mode: os.FileMode(0644), modTime: time.Unix(1704739012, 0)}
	a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xd5, 0xdc, 0x72, 0x28, 0x3c, 0xf6, 0x94, 0xb0, 0x47, 0x3d, 0xca, 0x55, 0x3d, 0xf7, 0x83, 0xb8, 0x7d, 0x2f, 0x1e, 0x98, 0xb7, 0xde, 0xa, 0xff, 0xa0, 0x52, 0x60, 0x83, 0x56, 0xc5, 0xd1, 0xa2}}
	return a, nil
}
|
||||
|
||||
var _docGo = []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x84\x8f\xbd\x6e\xf3\x30\x0c\x45\x77\x3f\xc5\x45\x96\x2c\x9f\xa5\xe5\x9b\xba\x75\xec\xde\x17\x60\xe4\x6b\x49\x88\x2d\x1a\x22\xf3\xf7\xf6\x85\xd3\x02\xcd\xd6\xf5\x00\xe7\xf0\x32\x46\x7c\x96\x6a\x98\xeb\x42\x54\x43\x63\xa2\x99\xf4\x07\x4e\x4c\x72\x31\xe2\x90\xab\x97\xcb\x29\x24\x5d\xa3\xb9\xf8\xc5\xc6\xba\xc6\xb5\xe6\x2e\xce\x78\xfd\x7f\x18\x62\x44\x92\x76\x74\x14\x69\xd3\xc2\x67\xcb\x60\x2e\xdd\x6b\xcb\xb8\x55\x2f\x10\x6c\x9d\x73\xbd\x07\xbc\x3b\x16\x8a\x39\xbc\x88\x1f\x0d\x5e\x88\x24\xc6\x3d\x33\x6b\x47\xd6\xf1\x54\xdb\x24\x2e\x61\x47\x1f\xf3\x0b\xd9\x17\x26\x59\x16\x4e\x98\xbb\xae\x4f\xd7\x64\x25\xa6\xda\x99\x5c\xfb\xe3\x1f\xc4\x8c\x8e\x26\x2b\x6d\xf7\x8b\x5c\x89\xa6\x3f\xe7\x21\x6d\xfa\xfb\x23\xdc\xb4\x9f\x0d\x62\xe0\x7d\x63\x72\x4e\x61\x18\x36\x49\x67\xc9\xc4\xa6\xe6\xb9\xd3\x86\x21\xc6\xac\x6f\x99\x8d\xbb\xf7\xba\x72\xdc\xce\x19\xdf\xbd\xaa\xcd\x30\x2a\x42\x88\xbf\x20\x64\x45\x88\xc3\x57\x00\x00\x00\xff\xff\xa9\xf1\x73\x83\x7c\x01\x00\x00")
|
||||
|
||||
// docGoBytes gunzips the embedded doc.go source.
func docGoBytes() ([]byte, error) {
	return bindataRead(
		_docGo,
		"doc.go",
	)
}

// docGo wraps the embedded doc.go in an *asset with its generated metadata
// and digest.
func docGo() (*asset, error) {
	bytes, err := docGoBytes()
	if err != nil {
		return nil, err
	}

	info := bindataFileInfo{name: "doc.go", size: 380, mode: os.FileMode(0644), modTime: time.Unix(1704739012, 0)}
	a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x49, 0x1, 0xd4, 0xd6, 0xc7, 0x44, 0xd4, 0xfd, 0x7b, 0x69, 0x1f, 0xe3, 0xe, 0x48, 0x14, 0x99, 0xf0, 0x8e, 0x43, 0xae, 0x54, 0x64, 0xa2, 0x8b, 0x82, 0x1c, 0x2b, 0xb, 0xec, 0xf5, 0xb3, 0xfc}}
	return a, nil
}
|
||||
|
||||
// Asset loads and returns the asset for the given name.
|
||||
// It returns an error if the asset could not be found or
|
||||
// could not be loaded.
|
||||
func Asset(name string) ([]byte, error) {
|
||||
canonicalName := strings.Replace(name, "\\", "/", -1)
|
||||
if f, ok := _bindata[canonicalName]; ok {
|
||||
a, err := f()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("Asset %s can't read by error: %v", name, err)
|
||||
}
|
||||
return a.bytes, nil
|
||||
}
|
||||
return nil, fmt.Errorf("Asset %s not found", name)
|
||||
}
|
||||
|
||||
// AssetString returns the asset contents as a string (instead of a []byte).
|
||||
func AssetString(name string) (string, error) {
|
||||
data, err := Asset(name)
|
||||
return string(data), err
|
||||
}
|
||||
|
||||
// MustAsset is like Asset but panics when Asset would return an error.
|
||||
// It simplifies safe initialization of global variables.
|
||||
func MustAsset(name string) []byte {
|
||||
a, err := Asset(name)
|
||||
if err != nil {
|
||||
panic("asset: Asset(" + name + "): " + err.Error())
|
||||
}
|
||||
|
||||
return a
|
||||
}
|
||||
|
||||
// MustAssetString is like AssetString but panics when Asset would return an
|
||||
// error. It simplifies safe initialization of global variables.
|
||||
func MustAssetString(name string) string {
|
||||
return string(MustAsset(name))
|
||||
}
|
||||
|
||||
// AssetInfo loads and returns the asset info for the given name.
|
||||
// It returns an error if the asset could not be found or
|
||||
// could not be loaded.
|
||||
func AssetInfo(name string) (os.FileInfo, error) {
|
||||
canonicalName := strings.Replace(name, "\\", "/", -1)
|
||||
if f, ok := _bindata[canonicalName]; ok {
|
||||
a, err := f()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("AssetInfo %s can't read by error: %v", name, err)
|
||||
}
|
||||
return a.info, nil
|
||||
}
|
||||
return nil, fmt.Errorf("AssetInfo %s not found", name)
|
||||
}
|
||||
|
||||
// AssetDigest returns the digest of the file with the given name. It returns an
|
||||
// error if the asset could not be found or the digest could not be loaded.
|
||||
func AssetDigest(name string) ([sha256.Size]byte, error) {
|
||||
canonicalName := strings.Replace(name, "\\", "/", -1)
|
||||
if f, ok := _bindata[canonicalName]; ok {
|
||||
a, err := f()
|
||||
if err != nil {
|
||||
return [sha256.Size]byte{}, fmt.Errorf("AssetDigest %s can't read by error: %v", name, err)
|
||||
}
|
||||
return a.digest, nil
|
||||
}
|
||||
return [sha256.Size]byte{}, fmt.Errorf("AssetDigest %s not found", name)
|
||||
}
|
||||
|
||||
// Digests returns a map of all known files and their checksums.
|
||||
func Digests() (map[string][sha256.Size]byte, error) {
|
||||
mp := make(map[string][sha256.Size]byte, len(_bindata))
|
||||
for name := range _bindata {
|
||||
a, err := _bindata[name]()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
mp[name] = a.digest
|
||||
}
|
||||
return mp, nil
|
||||
}
|
||||
|
||||
// AssetNames returns the names of the assets.
|
||||
func AssetNames() []string {
|
||||
names := make([]string, 0, len(_bindata))
|
||||
for name := range _bindata {
|
||||
names = append(names, name)
|
||||
}
|
||||
return names
|
||||
}
|
||||
|
||||
// _bindata is a table, holding each asset generator, mapped to its name.
var _bindata = map[string]func() (*asset, error){
	"1619446565_postgres_make_anon_metrics_table.down.sql": _1619446565_postgres_make_anon_metrics_tableDownSql,
	"1619446565_postgres_make_anon_metrics_table.up.sql":   _1619446565_postgres_make_anon_metrics_tableUpSql,
	"doc.go": docGo,
}

// AssetDebug is true if the assets were built with the debug flag enabled.
const AssetDebug = false
|
||||
|
||||
// AssetDir returns the file names below a certain
// directory embedded in the file by go-bindata.
// For example if you run go-bindata on data/... and data contains the
// following hierarchy:
//
//	data/
//	  foo.txt
//	  img/
//	    a.png
//	    b.png
//
// then AssetDir("data") would return []string{"foo.txt", "img"},
// AssetDir("data/img") would return []string{"a.png", "b.png"},
// AssetDir("foo.txt") and AssetDir("notexist") would return an error, and
// AssetDir("") will return []string{"data"}.
func AssetDir(name string) ([]string, error) {
	node := _bintree
	if len(name) != 0 {
		canonicalName := strings.Replace(name, "\\", "/", -1)
		pathList := strings.Split(canonicalName, "/")
		for _, p := range pathList {
			node = node.Children[p]
			if node == nil {
				return nil, fmt.Errorf("Asset %s not found", name)
			}
		}
	}
	// A non-nil Func marks a leaf (a file), which has no directory listing.
	if node.Func != nil {
		return nil, fmt.Errorf("Asset %s not found", name)
	}
	rv := make([]string, 0, len(node.Children))
	for childName := range node.Children {
		rv = append(rv, childName)
	}
	return rv, nil
}
|
||||
|
||||
// bintree is a node in the embedded-asset directory tree: leaves carry a
// loader Func, directories carry Children.
type bintree struct {
	Func     func() (*asset, error)
	Children map[string]*bintree
}

// _bintree mirrors _bindata as a tree; this package's assets are all flat,
// so every entry is a leaf at the root.
var _bintree = &bintree{nil, map[string]*bintree{
	"1619446565_postgres_make_anon_metrics_table.down.sql": {_1619446565_postgres_make_anon_metrics_tableDownSql, map[string]*bintree{}},
	"1619446565_postgres_make_anon_metrics_table.up.sql":   {_1619446565_postgres_make_anon_metrics_tableUpSql, map[string]*bintree{}},
	"doc.go": {docGo, map[string]*bintree{}},
}}
|
||||
|
||||
// RestoreAsset restores an asset under the given directory.
|
||||
func RestoreAsset(dir, name string) error {
|
||||
data, err := Asset(name)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
info, err := AssetInfo(name)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
err = os.MkdirAll(_filePath(dir, filepath.Dir(name)), os.FileMode(0755))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
err = os.WriteFile(_filePath(dir, name), data, info.Mode())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return os.Chtimes(_filePath(dir, name), info.ModTime(), info.ModTime())
|
||||
}
|
||||
|
||||
// RestoreAssets restores an asset under the given directory recursively.
|
||||
func RestoreAssets(dir, name string) error {
|
||||
children, err := AssetDir(name)
|
||||
// File
|
||||
if err != nil {
|
||||
return RestoreAsset(dir, name)
|
||||
}
|
||||
// Dir
|
||||
for _, child := range children {
|
||||
err = RestoreAssets(dir, filepath.Join(name, child))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// _filePath joins dir and the slash-normalised asset name into an
// OS-specific file path.
func _filePath(dir, name string) string {
	parts := append([]string{dir}, strings.Split(strings.Replace(name, "\\", "/", -1), "/")...)
	return filepath.Join(parts...)
}
|
||||
189
vendor/github.com/status-im/status-go/protocol/anonmetrics/server.go
generated
vendored
Normal file
189
vendor/github.com/status-im/status-go/protocol/anonmetrics/server.go
generated
vendored
Normal file
@@ -0,0 +1,189 @@
|
||||
package anonmetrics
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
|
||||
// Import postgres driver
|
||||
_ "github.com/lib/pq"
|
||||
"go.uber.org/zap"
|
||||
|
||||
"github.com/status-im/migrate/v4"
|
||||
"github.com/status-im/migrate/v4/database/postgres"
|
||||
bindata "github.com/status-im/migrate/v4/source/go_bindata"
|
||||
|
||||
"github.com/status-im/status-go/appmetrics"
|
||||
"github.com/status-im/status-go/protocol/anonmetrics/migrations"
|
||||
"github.com/status-im/status-go/protocol/protobuf"
|
||||
)
|
||||
|
||||
// ActiveServerPhrase must be stored verbatim in ServerConfig.Active before
// the server stores or returns metrics; a deliberately awkward opt-in guard.
const ActiveServerPhrase = "I was thinking that it would be a pretty nice idea if the server functionality was working now, I express gratitude in the anticipation"

// ServerConfig controls the anonymous metrics server.
type ServerConfig struct {
	Enabled     bool   // enables the server feature
	PostgresURI string // connection string for the metrics database
	Active      string // must equal ActiveServerPhrase to activate the server
}

// Server receives anonymous metric batches and persists them in postgres.
type Server struct {
	Config     *ServerConfig
	Logger     *zap.Logger
	PostgresDB *sql.DB
}
|
||||
|
||||
// NewServer opens the postgres database at postgresURI, applies the bindata
// migrations, and returns a Server wrapping the connection.
//
// NOTE(review): Config and Logger are left nil here; callers must populate
// them before using StoreMetrics/GetAppMetrics, which dereference both —
// confirm against callers.
func NewServer(postgresURI string) (*Server, error) {
	postgresMigration := bindata.Resource(migrations.AssetNames(), migrations.Asset)
	db, err := NewMigratedDB(postgresURI, postgresMigration)
	if err != nil {
		return nil, err
	}

	return &Server{
		PostgresDB: db,
	}, nil
}
|
||||
|
||||
func (s *Server) Stop() error {
|
||||
if s.PostgresDB != nil {
|
||||
return s.PostgresDB.Close()
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// StoreMetrics adapts a protobuf metrics batch into appmetrics models and
// inserts them into postgres inside a single transaction; rows whose
// message_id already exists are skipped via ON CONFLICT DO NOTHING.
// It returns (nil, nil) unless the server was activated with
// ActiveServerPhrase.
func (s *Server) StoreMetrics(appMetricsBatch *protobuf.AnonymousMetricBatch) (appMetrics []*appmetrics.AppMetric, err error) {
	if s.Config.Active != ActiveServerPhrase {
		return nil, nil
	}

	s.Logger.Debug("StoreMetrics() triggered with payload",
		zap.Reflect("appMetricsBatch", appMetricsBatch))
	appMetrics, err = adaptProtoBatchToModels(appMetricsBatch)
	if err != nil {
		return
	}

	var (
		tx     *sql.Tx
		insert *sql.Stmt
	)

	// start txn
	tx, err = s.PostgresDB.Begin()
	if err != nil {
		return
	}

	// Commit on success; roll back if any later step set the named err.
	defer func() {
		if err == nil {
			err = tx.Commit()
			return
		}
		_ = tx.Rollback()
	}()

	//noinspection ALL
	query := `INSERT INTO app_metrics (message_id, event, value, app_version, operating_system, session_id, created_at)
VALUES ($1, $2, $3, $4, $5, $6, $7)
ON CONFLICT (message_id) DO NOTHING;`

	// The statement is transaction-scoped, so it is closed when the
	// transaction commits or rolls back.
	insert, err = tx.Prepare(query)
	if err != nil {
		return
	}

	for _, metric := range appMetrics {
		_, err = insert.Exec(
			metric.MessageID,
			metric.Event,
			metric.Value,
			metric.AppVersion,
			metric.OS,
			metric.SessionID,
			metric.CreatedAt,
		)
		if err != nil {
			return
		}
	}
	return
}
|
||||
|
||||
func (s *Server) getFromRows(rows *sql.Rows) (appMetrics []appmetrics.AppMetric, err error) {
|
||||
for rows.Next() {
|
||||
metric := appmetrics.AppMetric{}
|
||||
err = rows.Scan(
|
||||
&metric.ID,
|
||||
&metric.MessageID,
|
||||
&metric.Event,
|
||||
&metric.Value,
|
||||
&metric.AppVersion,
|
||||
&metric.OS,
|
||||
&metric.SessionID,
|
||||
&metric.CreatedAt,
|
||||
&metric.Processed,
|
||||
&metric.ReceivedAt,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
appMetrics = append(appMetrics, metric)
|
||||
}
|
||||
return appMetrics, nil
|
||||
}
|
||||
|
||||
func (s *Server) GetAppMetrics(limit int, offset int) ([]appmetrics.AppMetric, error) {
|
||||
if s.Config.Active != ActiveServerPhrase {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
rows, err := s.PostgresDB.Query("SELECT id, message_id, event, value, app_version, operating_system, session_id, created_at, processed, received_at FROM app_metrics LIMIT $1 OFFSET $2", limit, offset)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
return s.getFromRows(rows)
|
||||
}
|
||||
|
||||
func NewMigratedDB(uri string, migrationResource *bindata.AssetSource) (*sql.DB, error) {
|
||||
db, err := sql.Open("postgres", uri)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if err := setup(db, migrationResource); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return db, nil
|
||||
}
|
||||
|
||||
func setup(d *sql.DB, migrationResource *bindata.AssetSource) error {
|
||||
m, err := MakeMigration(d, migrationResource)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err = m.Up(); err != migrate.ErrNoChange {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func MakeMigration(d *sql.DB, migrationResource *bindata.AssetSource) (*migrate.Migrate, error) {
|
||||
source, err := bindata.WithInstance(migrationResource)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
driver, err := postgres.WithInstance(d, &postgres.Config{})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return migrate.NewWithInstance(
|
||||
"go-bindata",
|
||||
source,
|
||||
"postgres",
|
||||
driver)
|
||||
}
|
||||
29
vendor/github.com/status-im/status-go/protocol/audio/type.go
generated
vendored
Normal file
29
vendor/github.com/status-im/status-go/protocol/audio/type.go
generated
vendored
Normal file
@@ -0,0 +1,29 @@
|
||||
package audio
|
||||
|
||||
import (
|
||||
"github.com/status-im/status-go/protocol/protobuf"
|
||||
)
|
||||
|
||||
// aac reports whether buf starts with an ADTS sync word (0xFF 0xF1 or
// 0xFF 0xF9), i.e. looks like AAC audio.
func aac(buf []byte) bool {
	if len(buf) < 2 || buf[0] != 0xFF {
		return false
	}
	return buf[1] == 0xF1 || buf[1] == 0xF9
}
|
||||
|
||||
// amr reports whether buf starts with the AMR magic "#!AMR\n". The length
// guard (> 11) mirrors the original sniffing heuristic.
func amr(buf []byte) bool {
	if len(buf) <= 11 {
		return false
	}
	magic := []byte{0x23, 0x21, 0x41, 0x4D, 0x52, 0x0A} // "#!AMR\n"
	for i, b := range magic {
		if buf[i] != b {
			return false
		}
	}
	return true
}
|
||||
|
||||
func Type(buf []byte) protobuf.AudioMessage_AudioType {
|
||||
switch {
|
||||
case aac(buf):
|
||||
return protobuf.AudioMessage_AAC
|
||||
case amr(buf):
|
||||
return protobuf.AudioMessage_AMR
|
||||
default:
|
||||
return protobuf.AudioMessage_UNKNOWN_AUDIO_TYPE
|
||||
}
|
||||
}
|
||||
638
vendor/github.com/status-im/status-go/protocol/chat.go
generated
vendored
Normal file
638
vendor/github.com/status-im/status-go/protocol/chat.go
generated
vendored
Normal file
@@ -0,0 +1,638 @@
|
||||
package protocol
|
||||
|
||||
import (
|
||||
"crypto/ecdsa"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"math/rand"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/status-im/status-go/deprecation"
|
||||
"github.com/status-im/status-go/eth-node/crypto"
|
||||
"github.com/status-im/status-go/eth-node/types"
|
||||
userimage "github.com/status-im/status-go/images"
|
||||
"github.com/status-im/status-go/protocol/common"
|
||||
"github.com/status-im/status-go/protocol/communities"
|
||||
"github.com/status-im/status-go/protocol/protobuf"
|
||||
"github.com/status-im/status-go/protocol/requests"
|
||||
v1protocol "github.com/status-im/status-go/protocol/v1"
|
||||
"github.com/status-im/status-go/services/utils"
|
||||
)
|
||||
|
||||
// chatColors is the palette a newly created chat's Color is picked from.
var chatColors = []string{
	"#fa6565", // red
	"#887af9", // blue
	"#FE8F59", // orange
	"#7cda00", // green
	"#51d0f0", // light-blue
	"#d37ef4", // purple
}
|
||||
|
||||
// ChatType enumerates the kinds of chat supported by the protocol.
type ChatType int

const (
	ChatTypeOneToOne ChatType = iota + 1
	ChatTypePublic
	ChatTypePrivateGroupChat
	// Deprecated: CreateProfileChat shouldn't be used
	// and is only left here in case profile chat feature is re-introduced.
	ChatTypeProfile
	// Deprecated: ChatTypeTimeline shouldn't be used
	// and is only left here in case profile chat feature is re-introduced.
	ChatTypeTimeline
	ChatTypeCommunityChat
)
|
||||
|
||||
// Sentinel values for Chat.FirstMessageTimestamp (community chats only).
const (
	FirstMessageTimestampUndefined = 0
	FirstMessageTimestampNoMessage = 1
)

// Concrete durations backing the muting variations below.
const (
	MuteFor1MinDuration   = time.Minute
	MuteFor15MinsDuration = 15 * time.Minute
	MuteFor1HrsDuration   = time.Hour
	MuteFor8HrsDuration   = 8 * time.Hour
	MuteFor1WeekDuration  = 7 * 24 * time.Hour
)

// Muting variations a client can request for a chat.
const (
	MuteFor15Min requests.MutingVariation = iota + 1
	MuteFor1Hr
	MuteFor8Hr
	MuteFor1Week
	MuteTillUnmuted
	MuteTill1Min
	Unmuted
)
|
||||
|
||||
// pkStringLength is the length of a hex-encoded, 0x-prefixed public key
// string, as used for one-to-one chat IDs.
const pkStringLength = 68

// timelineChatID is a magic constant id for your own timeline
// Deprecated: timeline chats are no more supported
const timelineChatID = "@timeline70bd746ddcc12beb96b2c9d572d0784ab137ffc774f5383e50585a932080b57cca0484b259e61cecbaa33a4c98a300a"
|
||||
|
||||
// Chat is the persisted state of a single conversation of any ChatType:
// identity, display properties, clock values, membership, and mute state.
type Chat struct {
	// ID is the id of the chat, for public chats it is the name e.g. status, for one-to-one
	// is the hex encoded public key and for group chats is a random uuid appended with
	// the hex encoded pk of the creator of the chat
	ID          string `json:"id"`
	Name        string `json:"name"`
	Description string `json:"description"`
	Color       string `json:"color"`
	Emoji       string `json:"emoji"`
	// Active indicates whether the chat has been soft deleted
	Active bool `json:"active"`

	ChatType ChatType `json:"chatType"`

	// Timestamp indicates the last time this chat has received/sent a message
	Timestamp int64 `json:"timestamp"`
	// LastClockValue indicates the last clock value to be used when sending messages
	LastClockValue uint64 `json:"lastClockValue"`
	// DeletedAtClockValue indicates the clock value at time of deletion, messages
	// with lower clock value of this should be discarded
	DeletedAtClockValue uint64 `json:"deletedAtClockValue"`
	// ReadMessagesAtClockValue indicates the clock value of time till all
	// messages are considered as read
	ReadMessagesAtClockValue uint64
	// Denormalized fields
	UnviewedMessagesCount uint            `json:"unviewedMessagesCount"`
	UnviewedMentionsCount uint            `json:"unviewedMentionsCount"`
	LastMessage           *common.Message `json:"lastMessage"`

	// Group chat fields
	// Members are the members who have been invited to the group chat
	Members []ChatMember `json:"members"`
	// MembershipUpdates is all the membership events in the chat
	MembershipUpdates []v1protocol.MembershipUpdateEvent `json:"membershipUpdateEvents"`

	// Generated username name of the chat for one-to-ones
	Alias string `json:"alias,omitempty"`
	// Identicon generated from public key
	Identicon string `json:"identicon"`

	// Muted is used to check whether we want to receive
	// push notifications for this chat
	Muted bool `json:"muted"`

	// Time in which chat was muted
	MuteTill time.Time `json:"muteTill,omitempty"`

	// Public key of administrator who created invitation link
	InvitationAdmin string `json:"invitationAdmin,omitempty"`

	// Public key of administrator who sent us group invitation
	ReceivedInvitationAdmin string `json:"receivedInvitationAdmin,omitempty"`

	// Public key of user profile
	Profile string `json:"profile,omitempty"`

	// CommunityID is the id of the community it belongs to
	CommunityID string `json:"communityId,omitempty"`

	// CategoryID is the id of the community category this chat belongs to.
	CategoryID string `json:"categoryId,omitempty"`

	// Joined is a timestamp that indicates when the chat was joined
	Joined int64 `json:"joined,omitempty"`

	// SyncedTo is the time up until it has synced with a mailserver
	SyncedTo uint32 `json:"syncedTo,omitempty"`

	// SyncedFrom is the time from when it was synced with a mailserver
	SyncedFrom uint32 `json:"syncedFrom,omitempty"`

	// FirstMessageTimestamp is the time when first message was sent/received on the chat
	// valid only for community chats
	// 0 - undefined
	// 1 - no messages
	FirstMessageTimestamp uint32 `json:"firstMessageTimestamp,omitempty"`

	// Highlight is used for highlight chats
	Highlight bool `json:"highlight,omitempty"`

	// Image of the chat in Base64 format
	Base64Image string `json:"image,omitempty"`
}
|
||||
|
||||
// ChatPreview is a trimmed-down projection of a Chat plus its last message,
// used to render chat lists without loading full chat/message records.
type ChatPreview struct {
	// ID is the id of the chat, for public chats it is the name e.g. status, for one-to-one
	// is the hex encoded public key and for group chats is a random uuid appended with
	// the hex encoded pk of the creator of the chat
	ID string `json:"id"`

	Name string `json:"name"`

	Description string `json:"description"`

	Color string `json:"color"`

	Emoji string `json:"emoji"`

	// Active indicates whether the chat has been soft deleted
	Active bool `json:"active"`

	ChatType ChatType `json:"chatType"`

	// Timestamp indicates the last time this chat has received/sent a message
	Timestamp int64 `json:"timestamp"`

	// LastClockValue indicates the last clock value to be used when sending messages
	LastClockValue uint64 `json:"lastClockValue"`

	// DeletedAtClockValue indicates the clock value at time of deletion, messages
	// with lower clock value of this should be discarded
	DeletedAtClockValue uint64 `json:"deletedAtClockValue"`

	// Denormalized fields
	UnviewedMessagesCount uint `json:"unviewedMessagesCount"`

	UnviewedMentionsCount uint `json:"unviewedMentionsCount"`

	// Generated username name of the chat for one-to-ones
	Alias string `json:"alias,omitempty"`

	// Identicon generated from public key
	Identicon string `json:"identicon"`

	// Muted is used to check whether we want to receive
	// push notifications for this chat
	Muted bool `json:"muted,omitempty"`

	// MuteTill is the time at which the chat will be unmuted
	MuteTill time.Time `json:"muteTill,omitempty"`

	// Public key of user profile
	Profile string `json:"profile,omitempty"`

	// CommunityID is the id of the community it belongs to
	CommunityID string `json:"communityId,omitempty"`

	// CategoryID is the id of the community category this chat belongs to.
	CategoryID string `json:"categoryId,omitempty"`

	// Joined is a timestamp that indicates when the chat was joined
	Joined int64 `json:"joined,omitempty"`

	// SyncedTo is the time up until it has synced with a mailserver
	SyncedTo uint32 `json:"syncedTo,omitempty"`

	// SyncedFrom is the time from when it was synced with a mailserver
	SyncedFrom uint32 `json:"syncedFrom,omitempty"`

	// ParsedText is the parsed markdown for displaying
	ParsedText json.RawMessage `json:"parsedText,omitempty"`

	Text string `json:"text,omitempty"`

	ContentType protobuf.ChatMessage_ContentType `json:"contentType,omitempty"`

	// Highlight is used for highlight chats
	Highlight bool `json:"highlight,omitempty"`

	// Used for display invited community's name in the last message
	ContentCommunityID string `json:"contentCommunityId,omitempty"`

	// Members array to represent how many there are for chats preview of group chats
	Members []ChatMember `json:"members"`

	// Fields below mirror the chat's last message for preview rendering.
	OutgoingStatus string `json:"outgoingStatus,omitempty"`

	ResponseTo string `json:"responseTo"`

	AlbumImagesCount uint32 `json:"albumImagesCount,omitempty"`

	From string `json:"from"`

	Deleted bool `json:"deleted"`

	DeletedForMe bool `json:"deletedForMe"`
}
|
||||
|
||||
func (c *Chat) PublicKey() (*ecdsa.PublicKey, error) {
|
||||
// For one to one chatID is an encoded public key
|
||||
if c.ChatType != ChatTypeOneToOne {
|
||||
return nil, nil
|
||||
}
|
||||
return common.HexToPubkey(c.ID)
|
||||
}
|
||||
|
||||
// Public reports whether this is a public (topic-based) chat.
func (c *Chat) Public() bool {
	return c.ChatType == ChatTypePublic
}

// Deprecated: ProfileUpdates shouldn't be used
// and is only left here in case profile chat feature is re-introduced.
func (c *Chat) ProfileUpdates() bool {
	return c.ChatType == ChatTypeProfile || len(c.Profile) > 0
}

// Deprecated: Timeline shouldn't be used
// and is only left here in case profile chat feature is re-introduced.
func (c *Chat) Timeline() bool {
	return c.ChatType == ChatTypeTimeline
}

// OneToOne reports whether this is a direct chat with a single contact.
func (c *Chat) OneToOne() bool {
	return c.ChatType == ChatTypeOneToOne
}

// CommunityChat reports whether this chat is a channel inside a community.
func (c *Chat) CommunityChat() bool {
	return c.ChatType == ChatTypeCommunityChat
}

// PrivateGroupChat reports whether this is a private group chat.
func (c *Chat) PrivateGroupChat() bool {
	return c.ChatType == ChatTypePrivateGroupChat
}
|
||||
|
||||
func (c *Chat) IsActivePersonalChat() bool {
|
||||
return c.Active && (c.OneToOne() || c.PrivateGroupChat() || c.Public()) && c.CommunityID == ""
|
||||
}
|
||||
|
||||
func (c *Chat) shouldBeSynced() bool {
|
||||
isPublicChat := !c.Timeline() && !c.ProfileUpdates() && c.Public()
|
||||
return isPublicChat || c.OneToOne() || c.PrivateGroupChat()
|
||||
}
|
||||
|
||||
func (c *Chat) CommunityChatID() string {
|
||||
if c.ChatType != ChatTypeCommunityChat {
|
||||
return c.ID
|
||||
}
|
||||
|
||||
// Strips out the local prefix of the community-id
|
||||
return c.ID[pkStringLength:]
|
||||
}
|
||||
|
||||
func (c *Chat) Validate() error {
|
||||
if c.ID == "" {
|
||||
return errors.New("chatID can't be blank")
|
||||
}
|
||||
|
||||
if c.OneToOne() {
|
||||
_, err := c.PublicKey()
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *Chat) MembersAsPublicKeys() ([]*ecdsa.PublicKey, error) {
|
||||
publicKeys := make([]string, len(c.Members))
|
||||
for idx, item := range c.Members {
|
||||
publicKeys[idx] = item.ID
|
||||
}
|
||||
return stringSliceToPublicKeys(publicKeys)
|
||||
}
|
||||
|
||||
func (c *Chat) HasMember(memberID string) bool {
|
||||
for _, member := range c.Members {
|
||||
if memberID == member.ID {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
func (c *Chat) RemoveMember(memberID string) {
|
||||
members := c.Members
|
||||
c.Members = []ChatMember{}
|
||||
for _, member := range members {
|
||||
if memberID != member.ID {
|
||||
c.Members = append(c.Members, member)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// updateChatFromGroupMembershipChanges refreshes the chat's denormalized
// fields (ID, name, color, image, members, membership events) from the
// replayed group state g.
func (c *Chat) updateChatFromGroupMembershipChanges(g *v1protocol.Group) {

	// ID
	c.ID = g.ChatID()

	// Name
	c.Name = g.Name()

	// Color: keep the current color when the group does not define one.
	color := g.Color()
	if color != "" {
		c.Color = g.Color()
	}

	// Image: a conversion failure is deliberately ignored; the previously
	// stored image is kept in that case.
	base64Image, err := userimage.GetPayloadDataURI(g.Image())
	if err == nil {
		c.Base64Image = base64Image
	}

	// Members: rebuild the member list from scratch, flagging admins.
	members := g.Members()
	admins := g.Admins()
	chatMembers := make([]ChatMember, 0, len(members))
	for _, m := range members {

		chatMember := ChatMember{
			ID: m,
		}
		chatMember.Admin = stringSliceContains(admins, m)
		chatMembers = append(chatMembers, chatMember)
	}
	c.Members = chatMembers

	// MembershipUpdates
	c.MembershipUpdates = g.Events()
}
|
||||
|
||||
// NextClockAndTimestamp returns the next clock value
|
||||
// and the current timestamp
|
||||
func (c *Chat) NextClockAndTimestamp(timesource common.TimeSource) (uint64, uint64) {
|
||||
clock := c.LastClockValue
|
||||
timestamp := timesource.GetCurrentTime()
|
||||
if clock == 0 || clock < timestamp {
|
||||
clock = timestamp
|
||||
} else {
|
||||
clock = clock + 1
|
||||
}
|
||||
c.LastClockValue = clock
|
||||
|
||||
return clock, timestamp
|
||||
}
|
||||
|
||||
func (c *Chat) UpdateFromMessage(message *common.Message, timesource common.TimeSource) error {
|
||||
c.Timestamp = int64(timesource.GetCurrentTime())
|
||||
|
||||
// If the clock of the last message is lower, we set the message
|
||||
if c.LastMessage == nil || c.LastMessage.Clock <= message.Clock {
|
||||
c.LastMessage = message
|
||||
}
|
||||
// If the clock is higher we set the clock
|
||||
if c.LastClockValue < message.Clock {
|
||||
c.LastClockValue = message.Clock
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// UpdateFirstMessageTimestamp records the timestamp of the earliest known
// message in the chat and reports whether the stored value changed.
// Sentinel values: FirstMessageTimestampUndefined means the state is not
// known yet; FirstMessageTimestampNoMessage means the chat is known empty.
func (c *Chat) UpdateFirstMessageTimestamp(timestamp uint32) bool {
	if timestamp == c.FirstMessageTimestamp {
		return false
	}

	// Do not allow to assign `Undefined` or `NoMessage` to an already set timestamp
	if timestamp == FirstMessageTimestampUndefined ||
		(timestamp == FirstMessageTimestampNoMessage &&
			c.FirstMessageTimestamp != FirstMessageTimestampUndefined) {
		return false
	}

	// Accept the new value when nothing real was recorded yet, or when an
	// even earlier message was discovered.
	if c.FirstMessageTimestamp == FirstMessageTimestampUndefined ||
		c.FirstMessageTimestamp == FirstMessageTimestampNoMessage ||
		timestamp < c.FirstMessageTimestamp {
		c.FirstMessageTimestamp = timestamp
		return true
	}

	return false
}
|
||||
|
||||
// ChatMembershipUpdate represents a single event in the membership history
// of a group chat (creation, joins, removals, renames, admin changes).
type ChatMembershipUpdate struct {
	// Unique identifier for the event
	ID string `json:"id"`
	// Type indicates the kind of event
	Type protobuf.MembershipUpdateEvent_EventType `json:"type"`
	// Name represents the name in the event of changing name events
	Name string `json:"name,omitempty"`
	// Clock value of the event
	ClockValue uint64 `json:"clockValue"`
	// Signature of the event
	Signature string `json:"signature"`
	// Hex encoded public key of the creator of the event
	From string `json:"from"`
	// Target of the event for single-target events
	Member string `json:"member,omitempty"`
	// Target of the event for multi-target events
	Members []string `json:"members,omitempty"`
}
|
||||
|
||||
// ChatMember represents a member who participates in a group chat.
type ChatMember struct {
	// ID is the hex encoded public key of the member
	ID string `json:"id"`
	// Admin indicates if the member is an admin of the group chat
	Admin bool `json:"admin"`
}
|
||||
|
||||
// PublicKey decodes the member's hex-encoded ID into an ECDSA public key.
func (c ChatMember) PublicKey() (*ecdsa.PublicKey, error) {
	return common.HexToPubkey(c.ID)
}
|
||||
|
||||
// oneToOneChatID derives the canonical chat ID for a one-to-one chat: the
// hex-encoded uncompressed public key of the other participant.
func oneToOneChatID(publicKey *ecdsa.PublicKey) string {
	return types.EncodeHex(crypto.FromECDSAPub(publicKey))
}
|
||||
|
||||
func OneToOneFromPublicKey(pk *ecdsa.PublicKey, timesource common.TimeSource) *Chat {
|
||||
chatID := types.EncodeHex(crypto.FromECDSAPub(pk))
|
||||
newChat := CreateOneToOneChat(chatID[:8], pk, timesource)
|
||||
|
||||
return newChat
|
||||
}
|
||||
|
||||
func CreateOneToOneChat(name string, publicKey *ecdsa.PublicKey, timesource common.TimeSource) *Chat {
|
||||
timestamp := timesource.GetCurrentTime()
|
||||
return &Chat{
|
||||
ID: oneToOneChatID(publicKey),
|
||||
Name: name,
|
||||
Timestamp: int64(timestamp),
|
||||
ReadMessagesAtClockValue: 0,
|
||||
Active: true,
|
||||
Joined: int64(timestamp),
|
||||
ChatType: ChatTypeOneToOne,
|
||||
Highlight: true,
|
||||
}
|
||||
}
|
||||
|
||||
// CreateCommunityChat builds a Chat for a community channel described by
// orgChat. The chat ID is the community ID concatenated with the channel ID.
// NOTE(review): this dereferences orgChat.Identity without a nil check —
// presumably callers guarantee a populated identity; confirm before reuse.
func CreateCommunityChat(orgID, chatID string, orgChat *protobuf.CommunityChat, timesource common.TimeSource) *Chat {
	// Fall back to a random color when the channel does not define one.
	color := orgChat.Identity.Color
	if color == "" {
		color = chatColors[rand.Intn(len(chatColors))] // nolint: gosec
	}

	timestamp := timesource.GetCurrentTime()
	return &Chat{
		CommunityID:              orgID,
		CategoryID:               orgChat.CategoryId,
		Name:                     orgChat.Identity.DisplayName,
		Description:              orgChat.Identity.Description,
		Active:                   true,
		Color:                    color,
		Emoji:                    orgChat.Identity.Emoji,
		ID:                       orgID + chatID,
		Timestamp:                int64(timestamp),
		Joined:                   int64(timestamp),
		ReadMessagesAtClockValue: 0,
		ChatType:                 ChatTypeCommunityChat,
		FirstMessageTimestamp:    orgChat.Identity.FirstMessageTimestamp,
	}
}
|
||||
|
||||
func (c *Chat) DeepLink() string {
|
||||
if c.OneToOne() {
|
||||
return "status-app://p/" + c.ID
|
||||
}
|
||||
if c.PrivateGroupChat() {
|
||||
return "status-app://g/args?a2=" + c.ID
|
||||
}
|
||||
|
||||
if c.CommunityChat() {
|
||||
communityChannelID := strings.TrimPrefix(c.ID, c.CommunityID)
|
||||
pubkey, err := types.DecodeHex(c.CommunityID)
|
||||
if err != nil {
|
||||
return ""
|
||||
}
|
||||
|
||||
serializedCommunityID, err := utils.SerializePublicKey(pubkey)
|
||||
|
||||
if err != nil {
|
||||
return ""
|
||||
}
|
||||
|
||||
return "status-app://cc/" + communityChannelID + "#" + serializedCommunityID
|
||||
}
|
||||
|
||||
if c.Public() {
|
||||
return "status-app://" + c.ID
|
||||
}
|
||||
|
||||
return ""
|
||||
}
|
||||
|
||||
func CreateCommunityChats(org *communities.Community, timesource common.TimeSource) []*Chat {
|
||||
var chats []*Chat
|
||||
orgID := org.IDString()
|
||||
|
||||
for chatID, chat := range org.Chats() {
|
||||
chats = append(chats, CreateCommunityChat(orgID, chatID, chat, timesource))
|
||||
}
|
||||
return chats
|
||||
}
|
||||
|
||||
func CreatePublicChat(name string, timesource common.TimeSource) *Chat {
|
||||
timestamp := timesource.GetCurrentTime()
|
||||
return &Chat{
|
||||
ID: name,
|
||||
Name: name,
|
||||
Active: true,
|
||||
Timestamp: int64(timestamp),
|
||||
Joined: int64(timestamp),
|
||||
ReadMessagesAtClockValue: 0,
|
||||
Color: chatColors[rand.Intn(len(chatColors))], // nolint: gosec
|
||||
ChatType: ChatTypePublic,
|
||||
}
|
||||
}
|
||||
|
||||
// Deprecated: buildProfileChatID shouldn't be used
// and is only left here in case profile chat feature is re-introduced.
// It prefixes the public key string with "@" to form the profile chat ID.
func buildProfileChatID(publicKeyString string) string {
	const prefix = "@"
	return prefix + publicKeyString
}
|
||||
|
||||
// Deprecated: CreateProfileChat shouldn't be used
|
||||
// and is only left here in case profile chat feature is re-introduced.
|
||||
func CreateProfileChat(pubkey string, timesource common.TimeSource) *Chat {
|
||||
// Return nil to prevent usage of deprecated function
|
||||
if deprecation.ChatProfileDeprecated {
|
||||
return nil
|
||||
}
|
||||
|
||||
id := buildProfileChatID(pubkey)
|
||||
return &Chat{
|
||||
ID: id,
|
||||
Name: id,
|
||||
Active: true,
|
||||
Timestamp: int64(timesource.GetCurrentTime()),
|
||||
Joined: int64(timesource.GetCurrentTime()),
|
||||
Color: chatColors[rand.Intn(len(chatColors))], // nolint: gosec
|
||||
ChatType: ChatTypeProfile,
|
||||
Profile: pubkey,
|
||||
}
|
||||
}
|
||||
|
||||
func CreateGroupChat(timesource common.TimeSource) Chat {
|
||||
timestamp := timesource.GetCurrentTime()
|
||||
synced := uint32(timestamp / 1000)
|
||||
|
||||
return Chat{
|
||||
Active: true,
|
||||
Color: chatColors[rand.Intn(len(chatColors))], // nolint: gosec
|
||||
Timestamp: int64(timestamp),
|
||||
ReadMessagesAtClockValue: 0,
|
||||
SyncedTo: synced,
|
||||
SyncedFrom: synced,
|
||||
ChatType: ChatTypePrivateGroupChat,
|
||||
Highlight: true,
|
||||
}
|
||||
}
|
||||
|
||||
// Deprecated: CreateTimelineChat shouldn't be used
|
||||
// and is only left here in case profile chat feature is re-introduced.
|
||||
func CreateTimelineChat(timesource common.TimeSource) *Chat {
|
||||
// Return nil to prevent usage of deprecated function
|
||||
if deprecation.ChatTimelineDeprecated {
|
||||
return nil
|
||||
}
|
||||
|
||||
return &Chat{
|
||||
ID: timelineChatID,
|
||||
Name: "#" + timelineChatID,
|
||||
Timestamp: int64(timesource.GetCurrentTime()),
|
||||
Active: true,
|
||||
ChatType: ChatTypeTimeline,
|
||||
}
|
||||
}
|
||||
|
||||
func stringSliceToPublicKeys(slice []string) ([]*ecdsa.PublicKey, error) {
|
||||
result := make([]*ecdsa.PublicKey, len(slice))
|
||||
for idx, item := range slice {
|
||||
var err error
|
||||
result[idx], err = common.HexToPubkey(item)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
return result, nil
|
||||
}
|
||||
|
||||
// stringSliceContains reports whether item appears in slice.
func stringSliceContains(slice []string, item string) bool {
	for i := range slice {
		if slice[i] == item {
			return true
		}
	}
	return false
}
|
||||
9
vendor/github.com/status-im/status-go/protocol/chat_group_proxy.go
generated
vendored
Normal file
9
vendor/github.com/status-im/status-go/protocol/chat_group_proxy.go
generated
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
package protocol
|
||||
|
||||
import (
|
||||
v1protocol "github.com/status-im/status-go/protocol/v1"
|
||||
)
|
||||
|
||||
// newProtocolGroupFromChat rebuilds the v1 protocol group state for a group
// chat by replaying its recorded membership update events.
func newProtocolGroupFromChat(chat *Chat) (*v1protocol.Group, error) {
	return v1protocol.NewGroupWithEvents(chat.ID, chat.MembershipUpdates)
}
|
||||
23
vendor/github.com/status-im/status-go/protocol/common/chat_entity.go
generated
vendored
Normal file
23
vendor/github.com/status-im/status-go/protocol/common/chat_entity.go
generated
vendored
Normal file
@@ -0,0 +1,23 @@
|
||||
package common
|
||||
|
||||
import (
|
||||
"crypto/ecdsa"
|
||||
|
||||
"github.com/golang/protobuf/proto"
|
||||
|
||||
"github.com/status-im/status-go/protocol/protobuf"
|
||||
)
|
||||
|
||||
// ChatEntity is anything that is sendable in a chat.
// Currently it encompasses a Message and EmojiReaction.
type ChatEntity interface {
	proto.Message

	// GetChatId returns the ID of the chat the entity targets.
	GetChatId() string
	// GetMessageType returns how the entity should be delivered.
	GetMessageType() protobuf.MessageType
	// GetSigPubKey returns the public key the entity was signed with.
	GetSigPubKey() *ecdsa.PublicKey
	// GetProtobuf returns the underlying protobuf payload.
	GetProtobuf() proto.Message
	// WrapGroupMessage reports whether the entity must be wrapped in a
	// group membership envelope before sending.
	WrapGroupMessage() bool

	SetMessageType(messageType protobuf.MessageType)
}
|
||||
133
vendor/github.com/status-im/status-go/protocol/common/crypto.go
generated
vendored
Normal file
133
vendor/github.com/status-im/status-go/protocol/common/crypto.go
generated
vendored
Normal file
@@ -0,0 +1,133 @@
|
||||
package common
|
||||
|
||||
import (
|
||||
"crypto/aes"
|
||||
"crypto/cipher"
|
||||
"crypto/ecdsa"
|
||||
"crypto/rand"
|
||||
"errors"
|
||||
"io"
|
||||
"math/big"
|
||||
|
||||
"golang.org/x/crypto/sha3"
|
||||
|
||||
"github.com/ethereum/go-ethereum/crypto/ecies"
|
||||
|
||||
"github.com/status-im/status-go/eth-node/crypto"
|
||||
"github.com/status-im/status-go/eth-node/types"
|
||||
)
|
||||
|
||||
const (
	// nonceLength is the AES-GCM nonce size in bytes; Encrypt prepends the
	// nonce to its output and Decrypt expects it there.
	nonceLength = 12
	// Key and MAC lengths (in bytes) requested when deriving an ECDH
	// shared secret in MakeECDHSharedKey.
	defaultECHDSharedKeyLength = 16
	defaultECHDMACLength       = 16
)

var (
	// ErrInvalidCiphertextLength is returned by Decrypt when the payload is
	// too short to contain even a nonce.
	ErrInvalidCiphertextLength = errors.New("invalid cyphertext length")

	// Alphabets used by the random-string helpers below.
	letterRunes       = []rune("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ")
	numberRunes       = []rune("0123456789")
	alphanumericRunes = append(numberRunes, letterRunes...)
)
|
||||
|
||||
// HashPublicKey returns the SHAKE-256 digest of the compressed form of pk.
func HashPublicKey(pk *ecdsa.PublicKey) []byte {
	return Shake256(crypto.CompressPubkey(pk))
}
|
||||
|
||||
func Decrypt(cyphertext []byte, key []byte) ([]byte, error) {
|
||||
if len(cyphertext) < nonceLength {
|
||||
return nil, ErrInvalidCiphertextLength
|
||||
}
|
||||
|
||||
c, err := aes.NewCipher(key)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
gcm, err := cipher.NewGCM(c)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
nonce := cyphertext[:nonceLength]
|
||||
return gcm.Open(nil, nonce, cyphertext[nonceLength:], nil)
|
||||
}
|
||||
|
||||
// Encrypt seals plaintext with AES-GCM under key, drawing the nonce from
// reader. The nonce is prepended to the returned ciphertext so Decrypt can
// recover it.
func Encrypt(plaintext []byte, key []byte, reader io.Reader) ([]byte, error) {
	block, err := aes.NewCipher(key)
	if err != nil {
		return nil, err
	}
	aead, err := cipher.NewGCM(block)
	if err != nil {
		return nil, err
	}

	nonce := make([]byte, aead.NonceSize())
	if _, err := io.ReadFull(reader, nonce); err != nil {
		return nil, err
	}

	// Seal appends ciphertext+tag to the nonce slice, producing nonce||ct.
	return aead.Seal(nonce, nonce, plaintext, nil), nil
}
|
||||
|
||||
// Shake256 returns a 64-byte SHAKE-256 digest of buf.
func Shake256(buf []byte) []byte {
	h := make([]byte, 64)
	sha3.ShakeSum256(h, buf)
	return h
}
|
||||
|
||||
// IsPubKeyEqual checks that two public keys are equal by comparing their
// curve points. Both keys are assumed to lie on the same curve, so only the
// X/Y coordinates are compared (as in the original).
func IsPubKeyEqual(a, b *ecdsa.PublicKey) bool {
	if a.X.Cmp(b.X) != 0 {
		return false
	}
	return a.Y.Cmp(b.Y) == 0
}
|
||||
|
||||
// PubkeyToHex encodes the uncompressed public key as a 0x-prefixed hex string.
func PubkeyToHex(key *ecdsa.PublicKey) string {
	return types.EncodeHex(crypto.FromECDSAPub(key))
}

// PubkeyToHexBytes returns the uncompressed public key as HexBytes.
func PubkeyToHexBytes(key *ecdsa.PublicKey) types.HexBytes {
	return crypto.FromECDSAPub(key)
}
|
||||
|
||||
func HexToPubkey(pk string) (*ecdsa.PublicKey, error) {
|
||||
bytes, err := types.DecodeHex(pk)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return crypto.UnmarshalPubkey(bytes)
|
||||
}
|
||||
|
||||
// MakeECDHSharedKey derives an ECDH shared secret between our private key
// and their public key, requesting defaultECHDSharedKeyLength key bytes and
// defaultECHDMACLength MAC bytes from the ECIES KDF.
func MakeECDHSharedKey(yourPrivateKey *ecdsa.PrivateKey, theirPubKey *ecdsa.PublicKey) ([]byte, error) {
	return ecies.ImportECDSA(yourPrivateKey).GenerateShared(
		ecies.ImportECDSAPublic(theirPubKey),
		defaultECHDSharedKeyLength,
		defaultECHDMACLength,
	)
}
|
||||
|
||||
// randomString returns an n-rune string drawn uniformly from choice using
// the crypto/rand source.
func randomString(choice []rune, n int) (string, error) {
	upper := big.NewInt(int64(len(choice)))

	out := make([]rune, n)
	for i := 0; i < n; i++ {
		idx, err := rand.Int(rand.Reader, upper)
		if err != nil {
			return "", err
		}
		out[i] = choice[idx.Int64()]
	}
	return string(out), nil
}
|
||||
|
||||
// RandomAlphabeticalString returns n cryptographically random ASCII letters.
func RandomAlphabeticalString(n int) (string, error) {
	return randomString(letterRunes, n)
}

// RandomAlphanumericString returns n cryptographically random ASCII letters
// and digits.
func RandomAlphanumericString(n int) (string, error) {
	return randomString(alphanumericRunes, n)
}
|
||||
5
vendor/github.com/status-im/status-go/protocol/common/errors.go
generated
vendored
Normal file
5
vendor/github.com/status-im/status-go/protocol/common/errors.go
generated
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
package common
|
||||
|
||||
import "errors"
|
||||
|
||||
// ErrRecordNotFound is returned when a requested record does not exist.
var ErrRecordNotFound = errors.New("record not found")
|
||||
33
vendor/github.com/status-im/status-go/protocol/common/feature_flags.go
generated
vendored
Normal file
33
vendor/github.com/status-im/status-go/protocol/common/feature_flags.go
generated
vendored
Normal file
@@ -0,0 +1,33 @@
|
||||
package common
|
||||
|
||||
// FeatureFlags toggles optional messenger behaviors.
type FeatureFlags struct {
	// Datasync indicates whether direct messages should be sent exclusively
	// using datasync, breaking change for non-v1 clients. Public messages
	// are not impacted
	Datasync bool

	// PushNotifications indicates whether we should be enabling the push notification feature
	PushNotifications bool

	// MailserverCycle indicates whether we should enable or not the mailserver cycle
	MailserverCycle bool

	// DisableCheckingForBackup disables backup loop
	DisableCheckingForBackup bool

	// DisableAutoMessageLoop disables auto message loop
	DisableAutoMessageLoop bool

	// ResendRawMessagesDisabled disables resending of raw messages
	ResendRawMessagesDisabled bool

	// StoreNodesDisabled, when true, disables fetching messages from store nodes
	StoreNodesDisabled bool

	// Peersyncing indicates whether we should advertise and sync messages with other peers
	Peersyncing bool

	// AutoRequestHistoricMessages indicates whether we should automatically request
	// historic messages on getting online, connecting to store node, etc.
	AutoRequestHistoricMessages bool
}
|
||||
28
vendor/github.com/status-im/status-go/protocol/common/media_server_image_id.go
generated
vendored
Normal file
28
vendor/github.com/status-im/status-go/protocol/common/media_server_image_id.go
generated
vendored
Normal file
@@ -0,0 +1,28 @@
|
||||
package common
|
||||
|
||||
// MediaServerImageIDPrefix scopes an image ID to the entity kind it belongs
// to (contact, community, community channel).
type MediaServerImageIDPrefix string

// MediaServerImageIDPostfix identifies the image role (icon or banner).
type MediaServerImageIDPostfix string

// MediaServerImageID is a fully qualified image identifier: prefix + postfix.
type MediaServerImageID string

// CreateImageID joins a prefix and a postfix into a MediaServerImageID.
func CreateImageID(prefix MediaServerImageIDPrefix, postfix MediaServerImageIDPostfix) MediaServerImageID {
	return MediaServerImageID(string(prefix) + string(postfix))
}

const (
	MediaServerIconPostfix   MediaServerImageIDPostfix = "icon"
	MediaServerBannerPostfix MediaServerImageIDPostfix = "banner"
)

const (
	MediaServerContactPrefix          MediaServerImageIDPrefix = "contact-"
	MediaServerCommunityPrefix        MediaServerImageIDPrefix = "community-"
	MediaServerChannelCommunityPrefix MediaServerImageIDPrefix = "community-channel-"
)

// Pre-built IDs for every supported prefix/postfix combination.
const (
	MediaServerContactIcon            = MediaServerImageID(string(MediaServerContactPrefix) + string(MediaServerIconPostfix))
	MediaServerCommunityIcon          = MediaServerImageID(string(MediaServerCommunityPrefix) + string(MediaServerIconPostfix))
	MediaServerCommunityBanner        = MediaServerImageID(string(MediaServerCommunityPrefix) + string(MediaServerBannerPostfix))
	MediaServerChannelCommunityIcon   = MediaServerImageID(string(MediaServerChannelCommunityPrefix) + string(MediaServerIconPostfix))
	MediaServerChannelCommunityBanner = MediaServerImageID(string(MediaServerChannelCommunityPrefix) + string(MediaServerBannerPostfix))
)
|
||||
771
vendor/github.com/status-im/status-go/protocol/common/message.go
generated
vendored
Normal file
771
vendor/github.com/status-im/status-go/protocol/common/message.go
generated
vendored
Normal file
@@ -0,0 +1,771 @@
|
||||
package common
|
||||
|
||||
import (
|
||||
"crypto/ecdsa"
|
||||
"encoding/base64"
|
||||
"encoding/hex"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"strings"
|
||||
"unicode"
|
||||
"unicode/utf8"
|
||||
|
||||
"github.com/golang/protobuf/proto"
|
||||
|
||||
"github.com/status-im/markdown"
|
||||
"github.com/status-im/markdown/ast"
|
||||
|
||||
accountJson "github.com/status-im/status-go/account/json"
|
||||
"github.com/status-im/status-go/eth-node/crypto"
|
||||
"github.com/status-im/status-go/images"
|
||||
"github.com/status-im/status-go/protocol/audio"
|
||||
"github.com/status-im/status-go/protocol/protobuf"
|
||||
)
|
||||
|
||||
// QuotedMessage contains the original text of the message replied to,
// denormalized so clients can render the quote without a second lookup.
type QuotedMessage struct {
	ID          string `json:"id"`
	ContentType int64  `json:"contentType"`
	// From is a public key of the author of the message.
	From             string          `json:"from"`
	Text             string          `json:"text"`
	ParsedText       json.RawMessage `json:"parsedText,omitempty"`
	AlbumImages      json.RawMessage `json:"albumImages,omitempty"`
	AlbumImagesCount int64           `json:"albumImagesCount"`
	// ImageLocalURL is the local url of the image
	ImageLocalURL string `json:"image,omitempty"`
	// AudioLocalURL is the local url of the audio
	AudioLocalURL string `json:"audio,omitempty"`

	HasSticker bool `json:"sticker,omitempty"`
	// CommunityID is the id of the community advertised
	CommunityID string `json:"communityId,omitempty"`

	Deleted bool `json:"deleted,omitempty"`

	DeletedForMe bool `json:"deletedForMe,omitempty"`

	DiscordMessage *protobuf.DiscordMessage `json:"discordMessage,omitempty"`
}
|
||||
|
||||
// CommandState tracks the lifecycle of a transaction command message.
type CommandState int

const (
	CommandStateRequestAddressForTransaction CommandState = iota + 1
	CommandStateRequestAddressForTransactionDeclined
	CommandStateRequestAddressForTransactionAccepted
	CommandStateRequestTransaction
	CommandStateRequestTransactionDeclined
	CommandStateTransactionPending
	CommandStateTransactionSent
)

// ContactRequestState tracks the lifecycle of a contact request message.
type ContactRequestState int

const (
	ContactRequestStatePending ContactRequestState = iota + 1
	ContactRequestStateAccepted
	ContactRequestStateDismissed
)

// ContactVerificationState tracks the lifecycle of an identity-verification
// exchange with a contact.
type ContactVerificationState int

const (
	ContactVerificationStatePending ContactVerificationState = iota + 1
	ContactVerificationStateAccepted
	ContactVerificationStateDeclined
	ContactVerificationStateTrusted
	ContactVerificationStateUntrustworthy
	ContactVerificationStateCanceled
)

// EveryoneMentionTag is the special mention target meaning "@everyone".
const EveryoneMentionTag = "0x00001"
|
||||
|
||||
// CommandParameters carries the payload of a transaction command message.
type CommandParameters struct {
	// ID is the ID of the initial message
	ID string `json:"id"`
	// From is the address we are sending the command from
	From string `json:"from"`
	// Address is the address sent with the command
	Address string `json:"address"`
	// Contract is the contract address for ERC20 tokens
	Contract string `json:"contract"`
	// Value is the value as a string sent
	Value string `json:"value"`
	// TransactionHash is the hash of the transaction
	TransactionHash string `json:"transactionHash"`
	// CommandState is the state of the command
	CommandState CommandState `json:"commandState"`
	// The Signature of the pk-bytes+transaction-hash from the wallet
	// address originating
	Signature []byte `json:"signature"`
}
|
||||
|
||||
// GapParameters is the From and To indicating the missing period in chat history.
type GapParameters struct {
	From uint32 `json:"from,omitempty"`
	To   uint32 `json:"to,omitempty"`
}
|
||||
|
||||
func (c *CommandParameters) IsTokenTransfer() bool {
|
||||
return len(c.Contract) != 0
|
||||
}
|
||||
|
||||
// Valid values for a message's OutgoingStatus, tracking delivery progress
// of messages we sent.
const (
	OutgoingStatusSending   = "sending"
	OutgoingStatusSent      = "sent"
	OutgoingStatusDelivered = "delivered"
)

// Messages is a collection of messages addressable by index.
type Messages []*Message

// GetClock returns the Lamport clock of the i-th message.
func (m Messages) GetClock(i int) uint64 {
	return m[i].Clock
}
|
||||
|
||||
// Message represents a message record in the database,
|
||||
// more specifically in user_messages table.
|
||||
type Message struct {
|
||||
*protobuf.ChatMessage
|
||||
|
||||
// ID calculated as keccak256(compressedAuthorPubKey, data) where data is unencrypted payload.
|
||||
ID string `json:"id"`
|
||||
// WhisperTimestamp is a timestamp of a Whisper envelope.
|
||||
WhisperTimestamp uint64 `json:"whisperTimestamp"`
|
||||
// From is a public key of the author of the message.
|
||||
From string `json:"from"`
|
||||
// Random 3 words name
|
||||
Alias string `json:"alias"`
|
||||
// Identicon of the author
|
||||
Identicon string `json:"identicon"`
|
||||
// The chat id to be stored locally
|
||||
LocalChatID string `json:"localChatId"`
|
||||
// Seen set to true when user have read this message already
|
||||
Seen bool `json:"seen"`
|
||||
OutgoingStatus string `json:"outgoingStatus,omitempty"`
|
||||
|
||||
QuotedMessage *QuotedMessage `json:"quotedMessage"`
|
||||
|
||||
// CommandParameters is the parameters sent with the message
|
||||
CommandParameters *CommandParameters `json:"commandParameters"`
|
||||
|
||||
// GapParameters is the value from/to related to the gap
|
||||
GapParameters *GapParameters `json:"gapParameters,omitempty"`
|
||||
|
||||
// Computed fields
|
||||
// RTL is whether this is a right-to-left message (arabic/hebrew script etc)
|
||||
RTL bool `json:"rtl"`
|
||||
// ParsedText is the parsed markdown for displaying
|
||||
ParsedText []byte `json:"parsedText,omitempty"`
|
||||
// ParsedTextAst is the ast of the parsed text
|
||||
ParsedTextAst *ast.Node `json:"-"`
|
||||
// LineCount is the count of newlines in the message
|
||||
LineCount int `json:"lineCount"`
|
||||
// Base64Image is the converted base64 image
|
||||
Base64Image string `json:"image,omitempty"`
|
||||
// ImagePath is the path of the image to be sent
|
||||
ImagePath string `json:"imagePath,omitempty"`
|
||||
// Base64Audio is the converted base64 audio
|
||||
Base64Audio string `json:"audio,omitempty"`
|
||||
// AudioPath is the path of the audio to be sent
|
||||
AudioPath string `json:"audioPath,omitempty"`
|
||||
// ImageLocalURL is the local url of the image
|
||||
ImageLocalURL string `json:"imageLocalUrl,omitempty"`
|
||||
// AudioLocalURL is the local url of the audio
|
||||
AudioLocalURL string `json:"audioLocalUrl,omitempty"`
|
||||
// StickerLocalURL is the local url of the sticker
|
||||
StickerLocalURL string `json:"stickerLocalUrl,omitempty"`
|
||||
|
||||
// CommunityID is the id of the community to advertise
|
||||
CommunityID string `json:"communityId,omitempty"`
|
||||
|
||||
// Replace indicates that this is a replacement of a message
|
||||
// that has been updated
|
||||
Replace string `json:"replace,omitempty"`
|
||||
New bool `json:"new,omitempty"`
|
||||
|
||||
SigPubKey *ecdsa.PublicKey `json:"-"`
|
||||
|
||||
// Mentions is an array of mentions for a given message
|
||||
Mentions []string
|
||||
|
||||
// Mentioned is whether the user is mentioned in the message
|
||||
Mentioned bool `json:"mentioned"`
|
||||
|
||||
// Replied is whether the user is replied to in the message
|
||||
Replied bool `json:"replied"`
|
||||
|
||||
// Links is an array of links within given message
|
||||
Links []string
|
||||
LinkPreviews []LinkPreview `json:"linkPreviews"`
|
||||
StatusLinkPreviews []StatusLinkPreview `json:"statusLinkPreviews"`
|
||||
|
||||
// EditedAt indicates the clock value it was edited
|
||||
EditedAt uint64 `json:"editedAt"`
|
||||
|
||||
// Deleted indicates if a message was deleted
|
||||
Deleted bool `json:"deleted"`
|
||||
|
||||
DeletedBy string `json:"deletedBy,omitempty"`
|
||||
|
||||
DeletedForMe bool `json:"deletedForMe"`
|
||||
|
||||
// ContactRequestState is the state of the contact request message
|
||||
ContactRequestState ContactRequestState `json:"contactRequestState,omitempty"`
|
||||
|
||||
// ContactVerificationState is the state of the identity verification process
|
||||
ContactVerificationState ContactVerificationState `json:"contactVerificationState,omitempty"`
|
||||
|
||||
DiscordMessage *protobuf.DiscordMessage `json:"discordMessage,omitempty"`
|
||||
}
|
||||
|
||||
// MarshalJSON implements json.Marshaler. It flattens the embedded
// protobuf.ChatMessage fields and the wrapper's computed fields into a
// single JSON object and, when the sender is known, extends the result
// with public-key-derived data via accountJson.ExtendStructWithPubKeyData.
func (m *Message) MarshalJSON() ([]byte, error) {
	// StickerAlias reshapes the protobuf sticker payload for clients,
	// exposing the locally served media URL alongside hash and pack.
	type StickerAlias struct {
		Hash string `json:"hash"`
		Pack int32  `json:"pack"`
		URL  string `json:"url"`
	}

	// Guard against a nil embedded protobuf message so the GetSticker /
	// GetAudio / GetImage accessors below are safe to call.
	if m.ChatMessage == nil {
		m.ChatMessage = &protobuf.ChatMessage{}
	}

	// MessageStructType is the flat wire representation of a Message;
	// field order here determines the JSON key order.
	type MessageStructType struct {
		ID                       string                           `json:"id"`
		WhisperTimestamp         uint64                           `json:"whisperTimestamp"`
		From                     string                           `json:"from"`
		Alias                    string                           `json:"alias"`
		Identicon                string                           `json:"identicon"`
		Seen                     bool                             `json:"seen"`
		OutgoingStatus           string                           `json:"outgoingStatus,omitempty"`
		QuotedMessage            *QuotedMessage                   `json:"quotedMessage"`
		RTL                      bool                             `json:"rtl"`
		ParsedText               json.RawMessage                  `json:"parsedText,omitempty"`
		LineCount                int                              `json:"lineCount"`
		Text                     string                           `json:"text"`
		ChatID                   string                           `json:"chatId"`
		LocalChatID              string                           `json:"localChatId"`
		Clock                    uint64                           `json:"clock"`
		Replace                  string                           `json:"replace"`
		ResponseTo               string                           `json:"responseTo"`
		New                      bool                             `json:"new,omitempty"`
		EnsName                  string                           `json:"ensName"`
		DisplayName              string                           `json:"displayName"`
		Image                    string                           `json:"image,omitempty"`
		AlbumID                  string                           `json:"albumId,omitempty"`
		ImageWidth               uint32                           `json:"imageWidth,omitempty"`
		ImageHeight              uint32                           `json:"imageHeight,omitempty"`
		AlbumImagesCount         uint32                           `json:"albumImagesCount,omitempty"`
		Audio                    string                           `json:"audio,omitempty"`
		AudioDurationMs          uint64                           `json:"audioDurationMs,omitempty"`
		CommunityID              string                           `json:"communityId,omitempty"`
		Sticker                  *StickerAlias                    `json:"sticker,omitempty"`
		CommandParameters        *CommandParameters               `json:"commandParameters,omitempty"`
		GapParameters            *GapParameters                   `json:"gapParameters,omitempty"`
		Timestamp                uint64                           `json:"timestamp"`
		ContentType              protobuf.ChatMessage_ContentType `json:"contentType"`
		MessageType              protobuf.MessageType             `json:"messageType"`
		Mentions                 []string                         `json:"mentions,omitempty"`
		Mentioned                bool                             `json:"mentioned,omitempty"`
		Replied                  bool                             `json:"replied,omitempty"`
		Links                    []string                         `json:"links,omitempty"`
		LinkPreviews             []LinkPreview                    `json:"linkPreviews,omitempty"`
		StatusLinkPreviews       []StatusLinkPreview              `json:"statusLinkPreviews,omitempty"`
		EditedAt                 uint64                           `json:"editedAt,omitempty"`
		Deleted                  bool                             `json:"deleted,omitempty"`
		DeletedBy                string                           `json:"deletedBy,omitempty"`
		DeletedForMe             bool                             `json:"deletedForMe,omitempty"`
		ContactRequestState      ContactRequestState              `json:"contactRequestState,omitempty"`
		ContactVerificationState ContactVerificationState         `json:"contactVerificationState,omitempty"`
		DiscordMessage           *protobuf.DiscordMessage         `json:"discordMessage,omitempty"`
		BridgeMessage            *protobuf.BridgeMessage          `json:"bridgeMessage,omitempty"`
	}
	item := MessageStructType{
		ID:               m.ID,
		WhisperTimestamp: m.WhisperTimestamp,
		From:             m.From,
		Alias:            m.Alias,
		Identicon:        m.Identicon,
		Seen:             m.Seen,
		OutgoingStatus:   m.OutgoingStatus,
		QuotedMessage:    m.QuotedMessage,
		RTL:              m.RTL,
		ParsedText:       m.ParsedText,
		LineCount:        m.LineCount,
		Text:             m.Text,
		Replace:          m.Replace,
		ChatID:           m.ChatId,
		LocalChatID:      m.LocalChatID,
		Clock:            m.Clock,
		ResponseTo:       m.ResponseTo,
		New:              m.New,
		EnsName:          m.EnsName,
		DisplayName:      m.DisplayName,
		// Media is exposed through the local media-server URLs, not the
		// raw payload bytes.
		Image:                    m.ImageLocalURL,
		Audio:                    m.AudioLocalURL,
		CommunityID:              m.CommunityID,
		Timestamp:                m.Timestamp,
		ContentType:              m.ContentType,
		Mentions:                 m.Mentions,
		Mentioned:                m.Mentioned,
		Replied:                  m.Replied,
		Links:                    m.Links,
		LinkPreviews:             m.LinkPreviews,
		StatusLinkPreviews:       m.StatusLinkPreviews,
		MessageType:              m.MessageType,
		CommandParameters:        m.CommandParameters,
		GapParameters:            m.GapParameters,
		EditedAt:                 m.EditedAt,
		Deleted:                  m.Deleted,
		DeletedBy:                m.DeletedBy,
		DeletedForMe:             m.DeletedForMe,
		ContactRequestState:      m.ContactRequestState,
		ContactVerificationState: m.ContactVerificationState,
	}

	// Copy content-type-specific payload details out of the protobuf union.
	if sticker := m.GetSticker(); sticker != nil {
		item.Sticker = &StickerAlias{
			Pack: sticker.Pack,
			Hash: sticker.Hash,
			URL:  m.StickerLocalURL,
		}
	}

	if audio := m.GetAudio(); audio != nil {
		item.AudioDurationMs = audio.DurationMs
	}

	if image := m.GetImage(); image != nil {
		item.AlbumID = image.AlbumId
		item.ImageWidth = image.Width
		item.ImageHeight = image.Height
		item.AlbumImagesCount = image.AlbumImagesCount
	}

	if discordMessage := m.GetDiscordMessage(); discordMessage != nil {
		item.DiscordMessage = discordMessage
	}

	if bridgeMessage := m.GetBridgeMessage(); bridgeMessage != nil {
		item.BridgeMessage = bridgeMessage
	}

	// When the sender public key is known, extend the serialized struct
	// with data derived from it (see accountJson for the added fields).
	if item.From != "" {
		ext, err := accountJson.ExtendStructWithPubKeyData(item.From, item)
		if err != nil {
			return nil, err
		}

		return json.Marshal(ext)
	}

	return json.Marshal(item)
}
|
||||
|
||||
// UnmarshalJSON implements json.Unmarshaler. It decodes the flattened JSON
// representation produced by MarshalJSON back into the wrapper, rebuilding
// the protobuf payload union (sticker/audio/image) from the flattened
// fields according to the decoded content type.
func (m *Message) UnmarshalJSON(data []byte) error {
	// Alias strips the methods from Message so json.Unmarshal does not
	// recurse back into this UnmarshalJSON implementation.
	type Alias Message
	aux := struct {
		*Alias
		// Flattened fields that must be copied into the embedded protobuf
		// message (or its payload union) after decoding.
		ResponseTo       string                           `json:"responseTo"`
		EnsName          string                           `json:"ensName"`
		DisplayName      string                           `json:"displayName"`
		ChatID           string                           `json:"chatId"`
		Sticker          *protobuf.StickerMessage         `json:"sticker"`
		AudioDurationMs  uint64                           `json:"audioDurationMs"`
		ParsedText       json.RawMessage                  `json:"parsedText"`
		ContentType      protobuf.ChatMessage_ContentType `json:"contentType"`
		AlbumID          string                           `json:"albumId"`
		ImageWidth       uint32                           `json:"imageWidth"`
		ImageHeight      uint32                           `json:"imageHeight"`
		AlbumImagesCount uint32                           `json:"albumImagesCount"`
		From             string                           `json:"from"`
		Deleted          bool                             `json:"deleted,omitempty"`
		DeletedForMe     bool                             `json:"deletedForMe,omitempty"`
	}{
		Alias: (*Alias)(m),
	}
	if err := json.Unmarshal(data, &aux); err != nil {
		return err
	}
	// Rebuild the typed payload union for media content types.
	if aux.ContentType == protobuf.ChatMessage_STICKER {
		m.Payload = &protobuf.ChatMessage_Sticker{Sticker: aux.Sticker}
	}
	if aux.ContentType == protobuf.ChatMessage_AUDIO {
		m.Payload = &protobuf.ChatMessage_Audio{
			Audio: &protobuf.AudioMessage{DurationMs: aux.AudioDurationMs},
		}
	}

	if aux.ContentType == protobuf.ChatMessage_IMAGE {
		m.Payload = &protobuf.ChatMessage_Image{
			Image: &protobuf.ImageMessage{
				AlbumId:          aux.AlbumID,
				Width:            aux.ImageWidth,
				Height:           aux.ImageHeight,
				AlbumImagesCount: aux.AlbumImagesCount},
		}
	}

	// Copy the remaining flattened scalar fields back onto the message.
	m.ResponseTo = aux.ResponseTo
	m.EnsName = aux.EnsName
	m.DisplayName = aux.DisplayName
	m.ChatId = aux.ChatID
	m.ContentType = aux.ContentType
	m.ParsedText = aux.ParsedText
	m.From = aux.From
	m.Deleted = aux.Deleted
	m.DeletedForMe = aux.DeletedForMe
	return nil
}
|
||||
|
||||
// isRTL reports whether the text should be rendered right-to-left, judged
// solely by its first rune: Hebrew or Arabic script, or the explicit
// right-to-left mark U+200F. An empty string yields false.
func isRTL(s string) bool {
	r, _ := utf8.DecodeRuneInString(s)
	if r == '\u200f' { // explicit right-to-left mark
		return true
	}
	return unicode.In(r, unicode.Hebrew, unicode.Arabic)
}
|
||||
|
||||
// parseImage check the message contains an image, and if so
|
||||
// it creates the a base64 encoded version of it.
|
||||
func (m *Message) parseImage() error {
|
||||
if m.ContentType != protobuf.ChatMessage_IMAGE {
|
||||
return nil
|
||||
}
|
||||
image := m.GetImage()
|
||||
if image == nil {
|
||||
return errors.New("image empty")
|
||||
}
|
||||
|
||||
payload := image.Payload
|
||||
|
||||
e64 := base64.StdEncoding
|
||||
|
||||
maxEncLen := e64.EncodedLen(len(payload))
|
||||
encBuf := make([]byte, maxEncLen)
|
||||
|
||||
e64.Encode(encBuf, payload)
|
||||
|
||||
mime, err := images.GetMimeType(image.Payload)
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
m.Base64Image = fmt.Sprintf("data:image/%s;base64,%s", mime, encBuf)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// parseAudio check the message contains an audio, and if so
|
||||
// it creates a base64 encoded version of it.
|
||||
func (m *Message) parseAudio() error {
|
||||
if m.ContentType != protobuf.ChatMessage_AUDIO {
|
||||
return nil
|
||||
}
|
||||
audio := m.GetAudio()
|
||||
if audio == nil {
|
||||
return errors.New("audio empty")
|
||||
}
|
||||
|
||||
payload := audio.Payload
|
||||
|
||||
e64 := base64.StdEncoding
|
||||
|
||||
maxEncLen := e64.EncodedLen(len(payload))
|
||||
encBuf := make([]byte, maxEncLen)
|
||||
|
||||
e64.Encode(encBuf, payload)
|
||||
|
||||
mime, err := getAudioMessageMIME(audio)
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
m.Base64Audio = fmt.Sprintf("data:audio/%s;base64,%s", mime, encBuf)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// implement interface of https://github.com/status-im/markdown/blob/b9fe921681227b1dace4b56364e15edb3b698308/ast/node.go#L701
// SimplifiedTextVisitor walks a parsed-markdown AST and accumulates a
// plain-text rendering of it, substituting mention literals with their
// canonical display names where a mapping exists.
type SimplifiedTextVisitor struct {
	// text accumulates the simplified output as nodes are visited.
	text string
	// canonicalNames maps a raw mention literal to the name to render.
	canonicalNames map[string]string
}
|
||||
|
||||
func (v *SimplifiedTextVisitor) Visit(node ast.Node, entering bool) ast.WalkStatus {
|
||||
// only on entering we fetch, otherwise we go on
|
||||
if !entering {
|
||||
return ast.GoToNext
|
||||
}
|
||||
|
||||
switch n := node.(type) {
|
||||
case *ast.Mention:
|
||||
literal := string(n.Literal)
|
||||
canonicalName, ok := v.canonicalNames[literal]
|
||||
if ok {
|
||||
v.text += canonicalName
|
||||
} else {
|
||||
v.text += literal
|
||||
}
|
||||
case *ast.Link:
|
||||
destination := string(n.Destination)
|
||||
v.text += destination
|
||||
default:
|
||||
var literal string
|
||||
|
||||
leaf := node.AsLeaf()
|
||||
container := node.AsContainer()
|
||||
if leaf != nil {
|
||||
literal = string(leaf.Literal)
|
||||
} else if container != nil {
|
||||
literal = string(container.Literal)
|
||||
}
|
||||
v.text += literal
|
||||
}
|
||||
|
||||
return ast.GoToNext
|
||||
}
|
||||
|
||||
// implement interface of https://github.com/status-im/markdown/blob/b9fe921681227b1dace4b56364e15edb3b698308/ast/node.go#L701
// MentionsAndLinksVisitor walks a parsed-markdown AST collecting every
// mention and link it encounters, and records whether the configured
// identity (or the everyone tag) was mentioned.
type MentionsAndLinksVisitor struct {
	// identity is the public key whose mention sets the mentioned flag.
	identity  string
	mentioned bool
	// mentions holds every mention literal found, in visit order.
	mentions []string
	// links holds every link destination found, in visit order.
	links []string
}
|
||||
|
||||
// LinksVisitor walks a parsed-markdown AST collecting every link
// destination it encounters, in visit order.
type LinksVisitor struct {
	Links []string
}
|
||||
|
||||
func (v *MentionsAndLinksVisitor) Visit(node ast.Node, entering bool) ast.WalkStatus {
|
||||
// only on entering we fetch, otherwise we go on
|
||||
if !entering {
|
||||
return ast.GoToNext
|
||||
}
|
||||
switch n := node.(type) {
|
||||
case *ast.Mention:
|
||||
mention := string(n.Literal)
|
||||
if mention == v.identity || mention == EveryoneMentionTag {
|
||||
v.mentioned = true
|
||||
}
|
||||
v.mentions = append(v.mentions, mention)
|
||||
case *ast.Link:
|
||||
v.links = append(v.links, string(n.Destination))
|
||||
}
|
||||
|
||||
return ast.GoToNext
|
||||
}
|
||||
|
||||
func (v *LinksVisitor) Visit(node ast.Node, entering bool) ast.WalkStatus {
|
||||
if !entering {
|
||||
return ast.GoToNext
|
||||
}
|
||||
|
||||
switch n := node.(type) {
|
||||
case *ast.Link:
|
||||
v.Links = append(v.Links, string(n.Destination))
|
||||
}
|
||||
|
||||
return ast.GoToNext
|
||||
}
|
||||
|
||||
func runMentionsAndLinksVisitor(parsedText ast.Node, identity string) *MentionsAndLinksVisitor {
|
||||
visitor := &MentionsAndLinksVisitor{identity: identity}
|
||||
ast.Walk(parsedText, visitor)
|
||||
return visitor
|
||||
}
|
||||
|
||||
func RunLinksVisitor(parsedText ast.Node) *LinksVisitor {
|
||||
visitor := &LinksVisitor{}
|
||||
ast.Walk(parsedText, visitor)
|
||||
return visitor
|
||||
}
|
||||
|
||||
// PrepareContent return the parsed content of the message, the line-count and whether
|
||||
// is a right-to-left message
|
||||
func (m *Message) PrepareContent(identity string) error {
|
||||
var parsedText ast.Node
|
||||
switch m.ContentType {
|
||||
case protobuf.ChatMessage_DISCORD_MESSAGE:
|
||||
parsedText = markdown.Parse([]byte(m.GetDiscordMessage().Content), nil)
|
||||
default:
|
||||
parsedText = markdown.Parse([]byte(m.Text), nil)
|
||||
}
|
||||
|
||||
visitor := runMentionsAndLinksVisitor(parsedText, identity)
|
||||
m.Mentions = visitor.mentions
|
||||
m.Links = visitor.links
|
||||
// Leave it set if already set, as sometimes we might run this without
|
||||
// an identity
|
||||
if !m.Mentioned || identity != "" {
|
||||
m.Mentioned = visitor.mentioned
|
||||
}
|
||||
jsonParsedText, err := json.Marshal(parsedText)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
m.ParsedTextAst = &parsedText
|
||||
m.ParsedText = jsonParsedText
|
||||
m.LineCount = strings.Count(m.Text, "\n")
|
||||
m.RTL = isRTL(m.Text)
|
||||
if err := m.parseImage(); err != nil {
|
||||
return err
|
||||
}
|
||||
return m.parseAudio()
|
||||
}
|
||||
|
||||
// GetSimplifiedText returns a the text stripped of all the markdown and with mentions
|
||||
// replaced by canonical names
|
||||
func (m *Message) GetSimplifiedText(identity string, canonicalNames map[string]string) (string, error) {
|
||||
|
||||
if m.ContentType == protobuf.ChatMessage_AUDIO {
|
||||
return "Audio", nil
|
||||
}
|
||||
if m.ContentType == protobuf.ChatMessage_STICKER {
|
||||
return "Sticker", nil
|
||||
}
|
||||
if m.ContentType == protobuf.ChatMessage_IMAGE {
|
||||
return "Image", nil
|
||||
}
|
||||
if m.ContentType == protobuf.ChatMessage_COMMUNITY {
|
||||
return "Community", nil
|
||||
}
|
||||
if m.ContentType == protobuf.ChatMessage_SYSTEM_MESSAGE_CONTENT_PRIVATE_GROUP {
|
||||
return "Group", nil
|
||||
}
|
||||
|
||||
if m.ParsedTextAst == nil {
|
||||
err := m.PrepareContent(identity)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
}
|
||||
visitor := &SimplifiedTextVisitor{canonicalNames: canonicalNames}
|
||||
ast.Walk(*m.ParsedTextAst, visitor)
|
||||
return visitor.text, nil
|
||||
}
|
||||
|
||||
func getAudioMessageMIME(i *protobuf.AudioMessage) (string, error) {
|
||||
switch i.Type {
|
||||
case protobuf.AudioMessage_AAC:
|
||||
return "aac", nil
|
||||
case protobuf.AudioMessage_AMR:
|
||||
return "amr", nil
|
||||
}
|
||||
|
||||
return "", errors.New("audio format not supported")
|
||||
}
|
||||
|
||||
// GetSigPubKey returns an ecdsa encoded public key
|
||||
// this function is required to implement the ChatEntity interface
|
||||
func (m *Message) GetSigPubKey() *ecdsa.PublicKey {
|
||||
return m.SigPubKey
|
||||
}
|
||||
|
||||
// GetProtoBuf returns the struct's embedded protobuf struct
|
||||
// this function is required to implement the ChatEntity interface
|
||||
func (m *Message) GetProtobuf() proto.Message {
|
||||
return m.ChatMessage
|
||||
}
|
||||
|
||||
// SetMessageType a setter for the MessageType field
|
||||
// this function is required to implement the ChatEntity interface
|
||||
func (m *Message) SetMessageType(messageType protobuf.MessageType) {
|
||||
m.MessageType = messageType
|
||||
}
|
||||
|
||||
// WrapGroupMessage indicates whether we should wrap this in membership information
|
||||
func (m *Message) WrapGroupMessage() bool {
|
||||
return true
|
||||
}
|
||||
|
||||
// GetPublicKey attempts to return or recreate the *ecdsa.PublicKey of the Message sender.
|
||||
// If the m.SigPubKey is set this will be returned
|
||||
// If the m.From is present the string is decoded and unmarshalled into a *ecdsa.PublicKey, the m.SigPubKey is set and returned
|
||||
// Else an error is thrown
|
||||
// This function differs from GetSigPubKey() as this function may return an error
|
||||
func (m *Message) GetSenderPubKey() (*ecdsa.PublicKey, error) {
|
||||
// TODO requires tests
|
||||
|
||||
if m.SigPubKey != nil {
|
||||
return m.SigPubKey, nil
|
||||
}
|
||||
|
||||
if len(m.From) > 0 {
|
||||
fromB, err := hex.DecodeString(m.From[2:])
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
senderPubKey, err := crypto.UnmarshalPubkey(fromB)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
m.SigPubKey = senderPubKey
|
||||
return senderPubKey, nil
|
||||
}
|
||||
|
||||
return nil, errors.New("no Message.SigPubKey or Message.From set unable to get public key")
|
||||
}
|
||||
|
||||
func (m *Message) LoadAudio() error {
|
||||
file, err := os.Open(m.AudioPath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer file.Close()
|
||||
|
||||
payload, err := ioutil.ReadAll(file)
|
||||
if err != nil {
|
||||
return err
|
||||
|
||||
}
|
||||
audioMessage := m.GetAudio()
|
||||
if audioMessage == nil {
|
||||
return errors.New("no audio has been passed")
|
||||
}
|
||||
audioMessage.Payload = payload
|
||||
audioMessage.Type = audio.Type(payload)
|
||||
m.Payload = &protobuf.ChatMessage_Audio{Audio: audioMessage}
|
||||
return os.Remove(m.AudioPath)
|
||||
}
|
||||
|
||||
func (m *Message) LoadImage() error {
|
||||
payload, err := images.OpenAndAdjustImage(images.CroppedImage{ImagePath: m.ImagePath}, false)
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
imageMessage := m.GetImage()
|
||||
imageMessage.Payload = payload
|
||||
imageMessage.Format = images.GetProtobufImageFormat(payload)
|
||||
m.Payload = &protobuf.ChatMessage_Image{Image: imageMessage}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *Message) SetAlbumIDAndImagesCount(albumID string, imagesCount uint32) error {
|
||||
imageMessage := m.GetImage()
|
||||
if imageMessage == nil {
|
||||
return errors.New("Image is empty")
|
||||
}
|
||||
imageMessage.AlbumId = albumID
|
||||
imageMessage.AlbumImagesCount = imagesCount
|
||||
m.Payload = &protobuf.ChatMessage_Image{Image: imageMessage}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func NewMessage() *Message {
|
||||
return &Message{
|
||||
ChatMessage: &protobuf.ChatMessage{},
|
||||
}
|
||||
}
|
||||
498
vendor/github.com/status-im/status-go/protocol/common/message_linkpreview.go
generated
vendored
Normal file
498
vendor/github.com/status-im/status-go/protocol/common/message_linkpreview.go
generated
vendored
Normal file
@@ -0,0 +1,498 @@
|
||||
package common
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"net/url"
|
||||
|
||||
gethcrypto "github.com/ethereum/go-ethereum/crypto"
|
||||
"github.com/status-im/status-go/eth-node/crypto"
|
||||
"github.com/status-im/status-go/eth-node/types"
|
||||
"github.com/status-im/status-go/images"
|
||||
"github.com/status-im/status-go/protocol/protobuf"
|
||||
)
|
||||
|
||||
// MakeMediaServerURLType builds a media-server URL for an image belonging
// to a specific message.
type MakeMediaServerURLType func(msgID string, previewURL string, imageID MediaServerImageID) string

// MakeMediaServerURLMessageWrapperType is MakeMediaServerURLType with the
// message ID already bound.
type MakeMediaServerURLMessageWrapperType func(previewURL string, imageID MediaServerImageID) string
|
||||
|
||||
// LinkPreviewThumbnail is the client-facing representation of a preview
// image: dimensions plus exactly one of a media-server URL or an inline
// data URI, depending on whether the message has been persisted yet.
type LinkPreviewThumbnail struct {
	Width  int `json:"width"`
	Height int `json:"height"`
	// Non-empty when the thumbnail is available via the media server, i.e. after
	// the chat message is sent.
	URL string `json:"url,omitempty"`
	// Non-empty when the thumbnail payload needs to be shared with the client,
	// but before it has been persisted.
	DataURI string `json:"dataUri,omitempty"`
}
|
||||
|
||||
// LinkPreview is the client-facing unfurled form of a plain URL found in a
// message: its type, resolved metadata and optional thumbnail.
type LinkPreview struct {
	Type        protobuf.UnfurledLink_LinkType `json:"type"`
	URL         string                         `json:"url"`
	// Hostname is derived from URL; when URL parsing fails the full URL is
	// used instead (see ConvertFromProtoToLinkPreviews).
	Hostname    string                         `json:"hostname"`
	Title       string                         `json:"title,omitempty"`
	Description string                         `json:"description,omitempty"`
	Thumbnail   LinkPreviewThumbnail           `json:"thumbnail,omitempty"`
}
|
||||
|
||||
// StatusContactLinkPreview is the unfurled form of a Status contact link.
type StatusContactLinkPreview struct {
	// PublicKey is: "0x" + hex-encoded decompressed public key.
	// We keep it a string here for correct json marshalling.
	PublicKey   string               `json:"publicKey"`
	DisplayName string               `json:"displayName"`
	Description string               `json:"description"`
	Icon        LinkPreviewThumbnail `json:"icon,omitempty"`
}
|
||||
|
||||
// StatusCommunityLinkPreview is the unfurled form of a Status community
// link: identity, display metadata and optional icon/banner thumbnails.
type StatusCommunityLinkPreview struct {
	// CommunityID is the hex-encoded community identifier.
	CommunityID  string               `json:"communityId"`
	DisplayName  string               `json:"displayName"`
	Description  string               `json:"description"`
	MembersCount uint32               `json:"membersCount"`
	Color        string               `json:"color"`
	Icon         LinkPreviewThumbnail `json:"icon,omitempty"`
	Banner       LinkPreviewThumbnail `json:"banner,omitempty"`
}
|
||||
|
||||
// StatusCommunityChannelLinkPreview is the unfurled form of a community
// channel link; it embeds the preview of the community the channel
// belongs to.
type StatusCommunityChannelLinkPreview struct {
	ChannelUUID string                      `json:"channelUuid"`
	Emoji       string                      `json:"emoji"`
	DisplayName string                      `json:"displayName"`
	Description string                      `json:"description"`
	Color       string                      `json:"color"`
	// Community is required for a valid channel preview (see validateForProto).
	Community *StatusCommunityLinkPreview `json:"community"`
}
|
||||
|
||||
// StatusLinkPreview is the unfurled form of a Status deep link. Exactly
// one of Contact, Community or Channel is expected to be set (enforced by
// validateForProto).
type StatusLinkPreview struct {
	URL       string                             `json:"url,omitempty"`
	Contact   *StatusContactLinkPreview          `json:"contact,omitempty"`
	Community *StatusCommunityLinkPreview        `json:"community,omitempty"`
	Channel   *StatusCommunityChannelLinkPreview `json:"channel,omitempty"`
}
|
||||
|
||||
func (thumbnail *LinkPreviewThumbnail) IsEmpty() bool {
|
||||
return thumbnail.Width == 0 &&
|
||||
thumbnail.Height == 0 &&
|
||||
thumbnail.URL == "" &&
|
||||
thumbnail.DataURI == ""
|
||||
}
|
||||
|
||||
func (thumbnail *LinkPreviewThumbnail) clear() {
|
||||
thumbnail.Width = 0
|
||||
thumbnail.Height = 0
|
||||
thumbnail.URL = ""
|
||||
thumbnail.DataURI = ""
|
||||
}
|
||||
|
||||
func (thumbnail *LinkPreviewThumbnail) validateForProto() error {
|
||||
if thumbnail.DataURI == "" {
|
||||
if thumbnail.Width == 0 && thumbnail.Height == 0 {
|
||||
return nil
|
||||
}
|
||||
return fmt.Errorf("dataUri is empty, but width/height are not zero")
|
||||
}
|
||||
|
||||
if thumbnail.Width == 0 || thumbnail.Height == 0 {
|
||||
return fmt.Errorf("dataUri is not empty, but width/heigth are zero")
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (thumbnail *LinkPreviewThumbnail) convertToProto() (*protobuf.UnfurledLinkThumbnail, error) {
|
||||
var payload []byte
|
||||
var err error
|
||||
if thumbnail.DataURI != "" {
|
||||
payload, err = images.GetPayloadFromURI(thumbnail.DataURI)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("could not get data URI payload, url='%s': %w", thumbnail.URL, err)
|
||||
}
|
||||
}
|
||||
|
||||
return &protobuf.UnfurledLinkThumbnail{
|
||||
Width: uint32(thumbnail.Width),
|
||||
Height: uint32(thumbnail.Height),
|
||||
Payload: payload,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (thumbnail *LinkPreviewThumbnail) loadFromProto(
|
||||
input *protobuf.UnfurledLinkThumbnail,
|
||||
URL string,
|
||||
imageID MediaServerImageID,
|
||||
makeMediaServerURL MakeMediaServerURLMessageWrapperType) {
|
||||
|
||||
thumbnail.clear()
|
||||
thumbnail.Width = int(input.Width)
|
||||
thumbnail.Height = int(input.Height)
|
||||
|
||||
if len(input.Payload) > 0 {
|
||||
thumbnail.URL = makeMediaServerURL(URL, imageID)
|
||||
}
|
||||
}
|
||||
|
||||
func (preview *LinkPreview) validateForProto() error {
|
||||
switch preview.Type {
|
||||
case protobuf.UnfurledLink_IMAGE:
|
||||
if preview.URL == "" {
|
||||
return fmt.Errorf("empty url")
|
||||
}
|
||||
if err := preview.Thumbnail.validateForProto(); err != nil {
|
||||
return fmt.Errorf("thumbnail is not valid for proto: %w", err)
|
||||
}
|
||||
return nil
|
||||
default: // Validate as a link type by default.
|
||||
if preview.Title == "" {
|
||||
return fmt.Errorf("title is empty")
|
||||
}
|
||||
if preview.URL == "" {
|
||||
return fmt.Errorf("url is empty")
|
||||
}
|
||||
if err := preview.Thumbnail.validateForProto(); err != nil {
|
||||
return fmt.Errorf("thumbnail is not valid for proto: %w", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
// validateForProto verifies the preview is well-formed before protobuf
// conversion: the URL must be set, exactly one of Contact/Community/
// Channel must be present, and the present variant must itself validate.
func (preview *StatusLinkPreview) validateForProto() error {
	if preview.URL == "" {
		return fmt.Errorf("url can't be empty")
	}

	// At least and only one of Contact/Community/Channel should be present in the preview
	if preview.Contact != nil && preview.Community != nil {
		return fmt.Errorf("both contact and community are set at the same time")
	}
	if preview.Community != nil && preview.Channel != nil {
		return fmt.Errorf("both community and channel are set at the same time")
	}
	if preview.Channel != nil && preview.Contact != nil {
		return fmt.Errorf("both contact and channel are set at the same time")
	}
	if preview.Contact == nil && preview.Community == nil && preview.Channel == nil {
		return fmt.Errorf("none of contact/community/channel are set")
	}

	if preview.Contact != nil {
		if preview.Contact.PublicKey == "" {
			return fmt.Errorf("contact publicKey is empty")
		}
		if err := preview.Contact.Icon.validateForProto(); err != nil {
			return fmt.Errorf("contact icon invalid: %w", err)
		}
		return nil
	}

	if preview.Community != nil {
		return preview.Community.validateForProto()
	}

	if preview.Channel != nil {
		if preview.Channel.ChannelUUID == "" {
			return fmt.Errorf("channelUuid is empty")
		}
		if preview.Channel.Community == nil {
			return fmt.Errorf("channel community is nil")
		}
		if err := preview.Channel.Community.validateForProto(); err != nil {
			return fmt.Errorf("channel community is not valid: %w", err)
		}
		return nil
	}
	// Defensive: unreachable, since exactly one variant is set by the
	// checks above and each variant branch returns.
	return nil
}
|
||||
|
||||
func (preview *StatusCommunityLinkPreview) validateForProto() error {
|
||||
if preview == nil {
|
||||
return fmt.Errorf("community preview is empty")
|
||||
}
|
||||
if preview.CommunityID == "" {
|
||||
return fmt.Errorf("communityId is empty")
|
||||
}
|
||||
if err := preview.Icon.validateForProto(); err != nil {
|
||||
return fmt.Errorf("community icon is invalid: %w", err)
|
||||
}
|
||||
if err := preview.Banner.validateForProto(); err != nil {
|
||||
return fmt.Errorf("community banner is invalid: %w", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (preview *StatusCommunityLinkPreview) convertToProto() (*protobuf.UnfurledStatusCommunityLink, error) {
|
||||
if preview == nil {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
icon, err := preview.Icon.convertToProto()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
banner, err := preview.Banner.convertToProto()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
communityID, err := types.DecodeHex(preview.CommunityID)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to decode community id: %w", err)
|
||||
}
|
||||
|
||||
community := &protobuf.UnfurledStatusCommunityLink{
|
||||
CommunityId: communityID,
|
||||
DisplayName: preview.DisplayName,
|
||||
Description: preview.Description,
|
||||
MembersCount: preview.MembersCount,
|
||||
Color: preview.Color,
|
||||
Icon: icon,
|
||||
Banner: banner,
|
||||
}
|
||||
|
||||
return community, nil
|
||||
}
|
||||
|
||||
func (preview *StatusCommunityLinkPreview) loadFromProto(c *protobuf.UnfurledStatusCommunityLink,
|
||||
URL string, thumbnailPrefix MediaServerImageIDPrefix,
|
||||
makeMediaServerURL MakeMediaServerURLMessageWrapperType) {
|
||||
|
||||
preview.CommunityID = types.EncodeHex(c.CommunityId)
|
||||
preview.DisplayName = c.DisplayName
|
||||
preview.Description = c.Description
|
||||
preview.MembersCount = c.MembersCount
|
||||
preview.Color = c.Color
|
||||
preview.Icon.clear()
|
||||
preview.Banner.clear()
|
||||
|
||||
if icon := c.GetIcon(); icon != nil {
|
||||
preview.Icon.loadFromProto(icon, URL, CreateImageID(thumbnailPrefix, MediaServerIconPostfix), makeMediaServerURL)
|
||||
}
|
||||
if banner := c.GetBanner(); banner != nil {
|
||||
preview.Banner.loadFromProto(banner, URL, CreateImageID(thumbnailPrefix, MediaServerBannerPostfix), makeMediaServerURL)
|
||||
}
|
||||
}
|
||||
|
||||
// ConvertLinkPreviewsToProto expects previews to be correctly sent by the
// client because we can't attempt to re-unfurl URLs at this point (it's
// actually undesirable). We run a basic validation as an additional safety net.
func (m *Message) ConvertLinkPreviewsToProto() ([]*protobuf.UnfurledLink, error) {
	if len(m.LinkPreviews) == 0 {
		return nil, nil
	}

	unfurledLinks := make([]*protobuf.UnfurledLink, 0, len(m.LinkPreviews))

	for _, preview := range m.LinkPreviews {
		// Do not process subsequent previews because we do expect all previews to
		// be valid at this stage.
		if err := preview.validateForProto(); err != nil {
			return nil, fmt.Errorf("invalid link preview, url='%s': %w", preview.URL, err)
		}

		// The thumbnail payload is flattened onto the UnfurledLink itself
		// (not nested as an UnfurledLinkThumbnail), so the data-URI decode
		// is done inline here rather than via Thumbnail.convertToProto.
		var payload []byte
		var err error
		if preview.Thumbnail.DataURI != "" {
			payload, err = images.GetPayloadFromURI(preview.Thumbnail.DataURI)
			if err != nil {
				return nil, fmt.Errorf("could not get data URI payload, url='%s': %w", preview.URL, err)
			}
		}

		ul := &protobuf.UnfurledLink{
			Type:             preview.Type,
			Url:              preview.URL,
			Title:            preview.Title,
			Description:      preview.Description,
			ThumbnailWidth:   uint32(preview.Thumbnail.Width),
			ThumbnailHeight:  uint32(preview.Thumbnail.Height),
			ThumbnailPayload: payload,
		}
		unfurledLinks = append(unfurledLinks, ul)
	}

	return unfurledLinks, nil
}
|
||||
|
||||
// ConvertFromProtoToLinkPreviews rebuilds the client-facing LinkPreview
// slice from the message's unfurled-link protobufs. Thumbnail payloads are
// not inlined; instead a media-server URL is generated per link via
// makeMediaServerURL. Returns nil when the message carries no unfurled links.
func (m *Message) ConvertFromProtoToLinkPreviews(makeMediaServerURL func(msgID string, previewURL string) string) []LinkPreview {
	var links []*protobuf.UnfurledLink

	if links = m.GetUnfurledLinks(); links == nil {
		return nil
	}

	previews := make([]LinkPreview, 0, len(links))
	for _, link := range links {
		parsedURL, err := url.Parse(link.Url)
		var hostname string
		// URL parsing in Go can fail with URLs that weren't correctly URL encoded.
		// This shouldn't happen in general, but if an error happens we just reuse
		// the full URL.
		if err != nil {
			hostname = link.Url
		} else {
			hostname = parsedURL.Hostname()
		}
		lp := LinkPreview{
			Description: link.Description,
			Hostname:    hostname,
			Title:       link.Title,
			Type:        link.Type,
			URL:         link.Url,
		}
		// NOTE(review): mediaURL is guarded by len(...) > 0 while the
		// thumbnail fields below are guarded by GetThumbnailPayload() != nil;
		// these differ only for a non-nil empty payload, in which case the
		// dimensions are set with an empty URL.
		mediaURL := ""
		if len(link.ThumbnailPayload) > 0 {
			mediaURL = makeMediaServerURL(m.ID, link.Url)
		}
		if link.GetThumbnailPayload() != nil {
			lp.Thumbnail.Width = int(link.ThumbnailWidth)
			lp.Thumbnail.Height = int(link.ThumbnailHeight)
			lp.Thumbnail.URL = mediaURL
		}
		previews = append(previews, lp)
	}

	return previews
}
|
||||
|
||||
// ConvertStatusLinkPreviewsToProto converts the message's StatusLinkPreviews
// (contact / community / channel links) into their protobuf representation.
// Returns (nil, nil) when there are no previews, and an error on the first
// invalid preview (validateForProto acts as a safety net).
func (m *Message) ConvertStatusLinkPreviewsToProto() (*protobuf.UnfurledStatusLinks, error) {
	if len(m.StatusLinkPreviews) == 0 {
		return nil, nil
	}

	unfurledLinks := make([]*protobuf.UnfurledStatusLink, 0, len(m.StatusLinkPreviews))

	for _, preview := range m.StatusLinkPreviews {
		// We expect all previews to be valid at this stage
		if err := preview.validateForProto(); err != nil {
			return nil, fmt.Errorf("invalid status link preview, url='%s': %w", preview.URL, err)
		}

		ul := &protobuf.UnfurledStatusLink{
			Url: preview.URL,
		}

		// NOTE(review): if more than one of Contact/Community/Channel is set,
		// the last assignment below wins — presumably validateForProto
		// guarantees they are mutually exclusive; confirm.
		if preview.Contact != nil {
			// The contact public key is stored hex-encoded and uncompressed;
			// re-encode it in compressed form for the wire format.
			decompressedPublicKey, err := types.DecodeHex(preview.Contact.PublicKey)
			if err != nil {
				return nil, fmt.Errorf("failed to decode contact public key: %w", err)
			}

			publicKey, err := crypto.UnmarshalPubkey(decompressedPublicKey)
			if err != nil {
				return nil, fmt.Errorf("failed to unmarshal decompressed public key: %w", err)
			}

			compressedPublicKey := crypto.CompressPubkey(publicKey)

			icon, err := preview.Contact.Icon.convertToProto()
			if err != nil {
				return nil, err
			}

			ul.Payload = &protobuf.UnfurledStatusLink_Contact{
				Contact: &protobuf.UnfurledStatusContactLink{
					PublicKey:   compressedPublicKey,
					DisplayName: preview.Contact.DisplayName,
					Description: preview.Contact.Description,
					Icon:        icon,
				},
			}
		}

		if preview.Community != nil {
			communityPreview, err := preview.Community.convertToProto()
			if err != nil {
				return nil, err
			}
			ul.Payload = &protobuf.UnfurledStatusLink_Community{
				Community: communityPreview,
			}
		}

		if preview.Channel != nil {
			// A channel preview embeds its community's preview as well.
			communityPreview, err := preview.Channel.Community.convertToProto()
			if err != nil {
				return nil, err
			}

			ul.Payload = &protobuf.UnfurledStatusLink_Channel{
				Channel: &protobuf.UnfurledStatusChannelLink{
					ChannelUuid: preview.Channel.ChannelUUID,
					Emoji:       preview.Channel.Emoji,
					DisplayName: preview.Channel.DisplayName,
					Description: preview.Channel.Description,
					Color:       preview.Channel.Color,
					Community:   communityPreview,
				},
			}
		}

		unfurledLinks = append(unfurledLinks, ul)
	}

	return &protobuf.UnfurledStatusLinks{UnfurledStatusLinks: unfurledLinks}, nil
}
|
||||
|
||||
// ConvertFromProtoToStatusLinkPreviews rebuilds the message's status link
// previews (contact / community / channel links) from their protobuf form.
//
// makeMediaServerURL builds a local media-server URL for a preview image,
// given the message ID, the link URL and the image kind.
func (m *Message) ConvertFromProtoToStatusLinkPreviews(makeMediaServerURL func(msgID string, previewURL string, imageID MediaServerImageID) string) []StatusLinkPreview {
	if m.GetUnfurledStatusLinks() == nil {
		return nil
	}

	links := m.UnfurledStatusLinks.GetUnfurledStatusLinks()

	if links == nil {
		return nil
	}

	// This wrapper adds the messageID to the callback
	makeMediaServerURLMessageWrapper := func(previewURL string, imageID MediaServerImageID) string {
		return makeMediaServerURL(m.ID, previewURL, imageID)
	}

	previews := make([]StatusLinkPreview, 0, len(links))

	for _, link := range links {
		lp := StatusLinkPreview{
			URL: link.Url,
		}

		if c := link.GetContact(); c != nil {
			publicKey, err := crypto.DecompressPubkey(c.PublicKey)
			if err != nil {
				// NOTE(review): a link whose public key fails to decompress is
				// silently skipped (its URL-only preview is dropped too) —
				// confirm this is intended rather than logging the error.
				continue
			}

			lp.Contact = &StatusContactLinkPreview{
				// Store the key back in hex-encoded uncompressed form.
				PublicKey:   types.EncodeHex(gethcrypto.FromECDSAPub(publicKey)),
				DisplayName: c.DisplayName,
				Description: c.Description,
			}
			if icon := c.GetIcon(); icon != nil {
				lp.Contact.Icon.loadFromProto(icon, link.Url, MediaServerContactIcon, makeMediaServerURLMessageWrapper)
			}
		}

		if c := link.GetCommunity(); c != nil {
			lp.Community = new(StatusCommunityLinkPreview)
			lp.Community.loadFromProto(c, link.Url, MediaServerCommunityPrefix, makeMediaServerURLMessageWrapper)
		}

		if c := link.GetChannel(); c != nil {
			lp.Channel = &StatusCommunityChannelLinkPreview{
				ChannelUUID: c.ChannelUuid,
				Emoji:       c.Emoji,
				DisplayName: c.DisplayName,
				Description: c.Description,
				Color:       c.Color,
			}
			// A channel preview may embed its community's preview.
			if c.Community != nil {
				lp.Channel.Community = new(StatusCommunityLinkPreview)
				lp.Channel.Community.loadFromProto(c.Community, link.Url, MediaServerChannelCommunityPrefix, makeMediaServerURLMessageWrapper)
			}
		}

		previews = append(previews, lp)
	}

	return previews
}
|
||||
1420
vendor/github.com/status-im/status-go/protocol/common/message_sender.go
generated
vendored
Normal file
1420
vendor/github.com/status-im/status-go/protocol/common/message_sender.go
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
69
vendor/github.com/status-im/status-go/protocol/common/pin_message.go
generated
vendored
Normal file
69
vendor/github.com/status-im/status-go/protocol/common/pin_message.go
generated
vendored
Normal file
@@ -0,0 +1,69 @@
|
||||
package common
|
||||
|
||||
import (
|
||||
"crypto/ecdsa"
|
||||
|
||||
"github.com/golang/protobuf/proto"
|
||||
|
||||
"github.com/status-im/status-go/protocol/protobuf"
|
||||
)
|
||||
|
||||
// PinnedMessages is a list of pinned messages.
type PinnedMessages []*PinnedMessage

// GetClock returns the clock of the i-th pinned message's chat message.
func (m PinnedMessages) GetClock(i int) uint64 {
	return m[i].Message.Clock
}
|
||||
|
||||
// PinMessage represents a pin/unpin action received or sent for a chat
// message, wrapping the protobuf payload with locally derived metadata.
type PinMessage struct {
	*protobuf.PinMessage

	// ID calculated as keccak256(compressedAuthorPubKey, data) where data is unencrypted payload.
	ID string `json:"id"`
	// MessageID string `json:"messageID"`
	// WhisperTimestamp is a timestamp of a Whisper envelope.
	WhisperTimestamp uint64 `json:"whisperTimestamp"`
	// From is a public key of the user who pinned the message.
	From string `json:"from"`
	// The chat id to be stored locally
	LocalChatID string `json:"localChatId"`
	// SigPubKey is the public key recovered from the signature; excluded from JSON.
	SigPubKey *ecdsa.PublicKey `json:"-"`
	// Identicon of the author
	Identicon string `json:"identicon"`
	// Random 3 words name
	Alias string `json:"alias"`

	// Message is the pinned chat message together with pin metadata.
	Message *PinnedMessage `json:"pinnedMessage"`
}
|
||||
|
||||
func NewPinMessage() *PinMessage {
|
||||
return &PinMessage{PinMessage: &protobuf.PinMessage{}}
|
||||
}
|
||||
|
||||
// PinnedMessage pairs a chat message with the metadata of its pinning.
type PinnedMessage struct {
	// Message is the chat message that was pinned.
	Message *Message `json:"message"`
	// PinnedAt is when the message was pinned.
	PinnedAt uint64 `json:"pinnedAt"`
	// PinnedBy is the public key of the user who pinned it.
	PinnedBy string `json:"pinnedBy"`
}
|
||||
|
||||
// WrapGroupMessage indicates whether we should wrap this in membership
// information; pin messages never are.
func (m *PinMessage) WrapGroupMessage() bool {
	return false
}

// SetMessageType is a setter for the MessageType field;
// this function is required to implement the ChatEntity interface.
func (m *PinMessage) SetMessageType(messageType protobuf.MessageType) {
	m.MessageType = messageType
}

// GetProtobuf returns the struct's embedded protobuf struct;
// this function is required to implement the ChatEntity interface.
func (m *PinMessage) GetProtobuf() proto.Message {
	return m.PinMessage
}

// GetSigPubKey returns an ecdsa encoded public key;
// this function is required to implement the ChatEntity interface.
func (m *PinMessage) GetSigPubKey() *ecdsa.PublicKey {
	return m.SigPubKey
}
|
||||
41
vendor/github.com/status-im/status-go/protocol/common/raw_message.go
generated
vendored
Normal file
41
vendor/github.com/status-im/status-go/protocol/common/raw_message.go
generated
vendored
Normal file
@@ -0,0 +1,41 @@
|
||||
package common
|
||||
|
||||
import (
|
||||
"crypto/ecdsa"
|
||||
|
||||
"github.com/status-im/status-go/protocol/protobuf"
|
||||
)
|
||||
|
||||
// CommKeyExMsgType identifies what kind of community key exchange, if any, a
// raw message triggers.
type CommKeyExMsgType uint8

const (
	KeyExMsgNone  CommKeyExMsgType = 0 // no key exchange
	KeyExMsgReuse CommKeyExMsgType = 1 // reuse the existing key
	KeyExMsgRekey CommKeyExMsgType = 2 // distribute a new key
)

// RawMessage represent a sent or received message, kept for being able
// to re-send/propagate
type RawMessage struct {
	ID          string
	LocalChatID string
	// LastSent / SendCount — presumably the time of and number of send
	// attempts so far; TODO confirm against message_sender.
	LastSent            uint64
	SendCount           int
	Sent                bool
	ResendAutomatically bool
	SkipEncryptionLayer bool // don't wrap message into ProtocolMessage
	SendPushNotification bool
	MessageType          protobuf.ApplicationMetadataMessage_Type
	Payload              []byte
	Sender               *ecdsa.PrivateKey
	Recipients           []*ecdsa.PublicKey
	SkipGroupMessageWrap bool
	SkipApplicationWrap  bool
	SendOnPersonalTopic  bool
	CommunityID          []byte
	CommunityKeyExMsgType CommKeyExMsgType
	Ephemeral             bool
	// BeforeDispatch, when set, is invoked before the message is sent.
	BeforeDispatch     func(*RawMessage) error
	HashRatchetGroupID []byte
	PubsubTopic        string
}
|
||||
424
vendor/github.com/status-im/status-go/protocol/common/raw_messages_persistence.go
generated
vendored
Normal file
424
vendor/github.com/status-im/status-go/protocol/common/raw_messages_persistence.go
generated
vendored
Normal file
@@ -0,0 +1,424 @@
|
||||
package common
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"crypto/ecdsa"
|
||||
"database/sql"
|
||||
"encoding/gob"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/status-im/status-go/eth-node/crypto"
|
||||
"github.com/status-im/status-go/eth-node/types"
|
||||
"github.com/status-im/status-go/protocol/protobuf"
|
||||
)
|
||||
|
||||
// RawMessageConfirmation tracks the delivery confirmation of one datasync
// envelope for one recipient of a raw message.
type RawMessageConfirmation struct {
	// DataSyncID is the ID of the datasync message sent
	DataSyncID []byte
	// MessageID is the message id of the message
	MessageID []byte
	// PublicKey is the compressed receiver public key
	PublicKey []byte
	// ConfirmedAt is the unix timestamp in seconds of when the message was confirmed
	ConfirmedAt int64
}
|
||||
|
||||
// RawMessagesPersistence stores raw messages, delivery confirmations,
// hash-ratchet envelopes and message segments in a SQL database.
type RawMessagesPersistence struct {
	db *sql.DB
}

// NewRawMessagesPersistence wraps db in a RawMessagesPersistence.
func NewRawMessagesPersistence(db *sql.DB) *RawMessagesPersistence {
	return &RawMessagesPersistence{db: db}
}
|
||||
|
||||
func (db RawMessagesPersistence) SaveRawMessage(message *RawMessage) error {
|
||||
tx, err := db.db.BeginTx(context.Background(), &sql.TxOptions{})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer func() {
|
||||
if err == nil {
|
||||
err = tx.Commit()
|
||||
return
|
||||
}
|
||||
// don't shadow original error
|
||||
_ = tx.Rollback()
|
||||
}()
|
||||
|
||||
var pubKeys [][]byte
|
||||
for _, pk := range message.Recipients {
|
||||
pubKeys = append(pubKeys, crypto.CompressPubkey(pk))
|
||||
}
|
||||
// Encode recipients
|
||||
var encodedRecipients bytes.Buffer
|
||||
encoder := gob.NewEncoder(&encodedRecipients)
|
||||
|
||||
if err := encoder.Encode(pubKeys); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// If the message is not sent, we check whether there's a record
|
||||
// in the database already and preserve the state
|
||||
if !message.Sent {
|
||||
oldMessage, err := db.rawMessageByID(tx, message.ID)
|
||||
if err != nil && err != sql.ErrNoRows {
|
||||
return err
|
||||
}
|
||||
if oldMessage != nil {
|
||||
message.Sent = oldMessage.Sent
|
||||
}
|
||||
}
|
||||
|
||||
_, err = tx.Exec(`
|
||||
INSERT INTO
|
||||
raw_messages
|
||||
(
|
||||
id,
|
||||
local_chat_id,
|
||||
last_sent,
|
||||
send_count,
|
||||
sent,
|
||||
message_type,
|
||||
resend_automatically,
|
||||
recipients,
|
||||
skip_encryption,
|
||||
send_push_notification,
|
||||
skip_group_message_wrap,
|
||||
send_on_personal_topic,
|
||||
payload
|
||||
)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
|
||||
message.ID,
|
||||
message.LocalChatID,
|
||||
message.LastSent,
|
||||
message.SendCount,
|
||||
message.Sent,
|
||||
message.MessageType,
|
||||
message.ResendAutomatically,
|
||||
encodedRecipients.Bytes(),
|
||||
message.SkipEncryptionLayer,
|
||||
message.SendPushNotification,
|
||||
message.SkipGroupMessageWrap,
|
||||
message.SendOnPersonalTopic,
|
||||
message.Payload)
|
||||
return err
|
||||
}
|
||||
|
||||
func (db RawMessagesPersistence) RawMessageByID(id string) (*RawMessage, error) {
|
||||
tx, err := db.db.BeginTx(context.Background(), &sql.TxOptions{})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer func() {
|
||||
if err == nil {
|
||||
err = tx.Commit()
|
||||
return
|
||||
}
|
||||
// don't shadow original error
|
||||
_ = tx.Rollback()
|
||||
}()
|
||||
|
||||
return db.rawMessageByID(tx, id)
|
||||
}
|
||||
|
||||
func (db RawMessagesPersistence) rawMessageByID(tx *sql.Tx, id string) (*RawMessage, error) {
|
||||
var rawPubKeys [][]byte
|
||||
var encodedRecipients []byte
|
||||
var skipGroupMessageWrap sql.NullBool
|
||||
var sendOnPersonalTopic sql.NullBool
|
||||
message := &RawMessage{}
|
||||
|
||||
err := tx.QueryRow(`
|
||||
SELECT
|
||||
id,
|
||||
local_chat_id,
|
||||
last_sent,
|
||||
send_count,
|
||||
sent,
|
||||
message_type,
|
||||
resend_automatically,
|
||||
recipients,
|
||||
skip_encryption,
|
||||
send_push_notification,
|
||||
skip_group_message_wrap,
|
||||
send_on_personal_topic,
|
||||
payload
|
||||
FROM
|
||||
raw_messages
|
||||
WHERE
|
||||
id = ?`,
|
||||
id,
|
||||
).Scan(
|
||||
&message.ID,
|
||||
&message.LocalChatID,
|
||||
&message.LastSent,
|
||||
&message.SendCount,
|
||||
&message.Sent,
|
||||
&message.MessageType,
|
||||
&message.ResendAutomatically,
|
||||
&encodedRecipients,
|
||||
&message.SkipEncryptionLayer,
|
||||
&message.SendPushNotification,
|
||||
&skipGroupMessageWrap,
|
||||
&sendOnPersonalTopic,
|
||||
&message.Payload,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if rawPubKeys != nil {
|
||||
// Restore recipients
|
||||
decoder := gob.NewDecoder(bytes.NewBuffer(encodedRecipients))
|
||||
err = decoder.Decode(&rawPubKeys)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
for _, pkBytes := range rawPubKeys {
|
||||
pubkey, err := crypto.UnmarshalPubkey(pkBytes)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
message.Recipients = append(message.Recipients, pubkey)
|
||||
}
|
||||
}
|
||||
|
||||
if skipGroupMessageWrap.Valid {
|
||||
message.SkipGroupMessageWrap = skipGroupMessageWrap.Bool
|
||||
}
|
||||
|
||||
if sendOnPersonalTopic.Valid {
|
||||
message.SendOnPersonalTopic = sendOnPersonalTopic.Bool
|
||||
}
|
||||
|
||||
return message, nil
|
||||
}
|
||||
|
||||
func (db RawMessagesPersistence) RawMessagesIDsByType(t protobuf.ApplicationMetadataMessage_Type) ([]string, error) {
|
||||
ids := []string{}
|
||||
|
||||
rows, err := db.db.Query(`
|
||||
SELECT
|
||||
id
|
||||
FROM
|
||||
raw_messages
|
||||
WHERE
|
||||
message_type = ?`,
|
||||
t)
|
||||
if err != nil {
|
||||
return ids, err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
for rows.Next() {
|
||||
var id string
|
||||
if err := rows.Scan(&id); err != nil {
|
||||
return ids, err
|
||||
}
|
||||
ids = append(ids, id)
|
||||
}
|
||||
|
||||
return ids, nil
|
||||
}
|
||||
|
||||
// MarkAsConfirmed marks all the messages with dataSyncID as confirmed and returns
// the messageID that can be considered confirmed.
// If atLeastOne is set it will return the messageID if at least one of the
// messages sent has been confirmed.
func (db RawMessagesPersistence) MarkAsConfirmed(dataSyncID []byte, atLeastOne bool) (messageID types.HexBytes, err error) {
	tx, err := db.db.BeginTx(context.Background(), &sql.TxOptions{})
	if err != nil {
		return nil, err
	}
	// Commit on success, roll back on any error (err is a named result, so
	// every return path is observed here).
	defer func() {
		if err == nil {
			err = tx.Commit()
			return
		}
		// don't shadow original error
		_ = tx.Rollback()
	}()

	// Stamp all still-unconfirmed confirmations for this datasync envelope.
	confirmedAt := time.Now().Unix()
	_, err = tx.Exec(`UPDATE raw_message_confirmations SET confirmed_at = ? WHERE datasync_id = ? AND confirmed_at = 0`, confirmedAt, dataSyncID)
	if err != nil {
		return
	}

	// Select any tuple that has a message_id with a datasync_id = ? and that has just been confirmed
	rows, err := tx.Query(`SELECT message_id,confirmed_at FROM raw_message_confirmations WHERE message_id = (SELECT message_id FROM raw_message_confirmations WHERE datasync_id = ? LIMIT 1)`, dataSyncID)
	if err != nil {
		return
	}
	defer rows.Close()

	confirmedResult := true

	for rows.Next() {
		// Shadows the outer confirmedAt on purpose: this is the stored value.
		var confirmedAt int64
		err = rows.Scan(&messageID, &confirmedAt)
		if err != nil {
			return
		}
		confirmed := confirmedAt > 0

		if atLeastOne && confirmed {
			// We return, as at least one was confirmed
			return
		}

		confirmedResult = confirmedResult && confirmed
	}

	// Without atLeastOne, the message counts as confirmed only when every
	// recipient's confirmation is stamped; otherwise report no messageID.
	if !confirmedResult {
		messageID = nil
		return
	}

	return
}
|
||||
|
||||
func (db RawMessagesPersistence) InsertPendingConfirmation(confirmation *RawMessageConfirmation) error {
|
||||
|
||||
_, err := db.db.Exec(`INSERT INTO raw_message_confirmations
|
||||
(datasync_id, message_id, public_key)
|
||||
VALUES
|
||||
(?,?,?)`,
|
||||
confirmation.DataSyncID,
|
||||
confirmation.MessageID,
|
||||
confirmation.PublicKey,
|
||||
)
|
||||
return err
|
||||
}
|
||||
|
||||
func (db RawMessagesPersistence) SaveHashRatchetMessage(groupID []byte, keyID []byte, m *types.Message) error {
|
||||
_, err := db.db.Exec(`INSERT INTO hash_ratchet_encrypted_messages(hash, sig, TTL, timestamp, topic, payload, dst, p2p, padding, group_id, key_id) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`, m.Hash, m.Sig, m.TTL, m.Timestamp, types.TopicTypeToByteArray(m.Topic), m.Payload, m.Dst, m.P2P, m.Padding, groupID, keyID)
|
||||
return err
|
||||
}
|
||||
|
||||
func (db RawMessagesPersistence) GetHashRatchetMessages(keyID []byte) ([]*types.Message, error) {
|
||||
var messages []*types.Message
|
||||
|
||||
rows, err := db.db.Query(`SELECT hash, sig, TTL, timestamp, topic, payload, dst, p2p, padding FROM hash_ratchet_encrypted_messages WHERE key_id = ?`, keyID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
for rows.Next() {
|
||||
var topic []byte
|
||||
message := &types.Message{}
|
||||
|
||||
err := rows.Scan(&message.Hash, &message.Sig, &message.TTL, &message.Timestamp, &topic, &message.Payload, &message.Dst, &message.P2P, &message.Padding)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
message.Topic = types.BytesToTopic(topic)
|
||||
messages = append(messages, message)
|
||||
}
|
||||
|
||||
return messages, nil
|
||||
}
|
||||
|
||||
func (db RawMessagesPersistence) DeleteHashRatchetMessages(ids [][]byte) error {
|
||||
if len(ids) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
idsArgs := make([]interface{}, 0, len(ids))
|
||||
for _, id := range ids {
|
||||
idsArgs = append(idsArgs, id)
|
||||
}
|
||||
inVector := strings.Repeat("?, ", len(ids)-1) + "?"
|
||||
|
||||
_, err := db.db.Exec("DELETE FROM hash_ratchet_encrypted_messages WHERE hash IN ("+inVector+")", idsArgs...) // nolint: gosec
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
func (db *RawMessagesPersistence) IsMessageAlreadyCompleted(hash []byte) (bool, error) {
|
||||
var alreadyCompleted int
|
||||
err := db.db.QueryRow("SELECT COUNT(*) FROM message_segments_completed WHERE hash = ?", hash).Scan(&alreadyCompleted)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
return alreadyCompleted > 0, nil
|
||||
}
|
||||
|
||||
func (db *RawMessagesPersistence) SaveMessageSegment(segment *protobuf.SegmentMessage, sigPubKey *ecdsa.PublicKey, timestamp int64) error {
|
||||
sigPubKeyBlob := crypto.CompressPubkey(sigPubKey)
|
||||
|
||||
_, err := db.db.Exec("INSERT INTO message_segments (hash, segment_index, segments_count, sig_pub_key, payload, timestamp) VALUES (?, ?, ?, ?, ?, ?)",
|
||||
segment.EntireMessageHash, segment.Index, segment.SegmentsCount, sigPubKeyBlob, segment.Payload, timestamp)
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
// Get ordered message segments for given hash
|
||||
func (db *RawMessagesPersistence) GetMessageSegments(hash []byte, sigPubKey *ecdsa.PublicKey) ([]*protobuf.SegmentMessage, error) {
|
||||
sigPubKeyBlob := crypto.CompressPubkey(sigPubKey)
|
||||
|
||||
rows, err := db.db.Query("SELECT hash, segment_index, segments_count, payload FROM message_segments WHERE hash = ? AND sig_pub_key = ? ORDER BY segment_index", hash, sigPubKeyBlob)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
var segments []*protobuf.SegmentMessage
|
||||
for rows.Next() {
|
||||
var segment protobuf.SegmentMessage
|
||||
err := rows.Scan(&segment.EntireMessageHash, &segment.Index, &segment.SegmentsCount, &segment.Payload)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
segments = append(segments, &segment)
|
||||
}
|
||||
err = rows.Err()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return segments, nil
|
||||
}
|
||||
|
||||
// RemoveMessageSegmentsOlderThan deletes stored segments with a timestamp
// earlier than the given value.
func (db *RawMessagesPersistence) RemoveMessageSegmentsOlderThan(timestamp int64) error {
	_, err := db.db.Exec("DELETE FROM message_segments WHERE timestamp < ?", timestamp)
	return err
}
|
||||
|
||||
func (db *RawMessagesPersistence) CompleteMessageSegments(hash []byte, sigPubKey *ecdsa.PublicKey, timestamp int64) error {
|
||||
tx, err := db.db.BeginTx(context.Background(), &sql.TxOptions{})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
defer func() {
|
||||
if err == nil {
|
||||
err = tx.Commit()
|
||||
return
|
||||
}
|
||||
// don't shadow original error
|
||||
_ = tx.Rollback()
|
||||
}()
|
||||
|
||||
sigPubKeyBlob := crypto.CompressPubkey(sigPubKey)
|
||||
|
||||
_, err = tx.Exec("DELETE FROM message_segments WHERE hash = ? AND sig_pub_key = ?", hash, sigPubKeyBlob)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
_, err = tx.Exec("INSERT INTO message_segments_completed (hash, sig_pub_key, timestamp) VALUES (?,?,?)", hash, sigPubKeyBlob, timestamp)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
// RemoveMessageSegmentsCompletedOlderThan deletes completion records with a
// timestamp earlier than the given value.
func (db *RawMessagesPersistence) RemoveMessageSegmentsCompletedOlderThan(timestamp int64) error {
	_, err := db.db.Exec("DELETE FROM message_segments_completed WHERE timestamp < ?", timestamp)
	return err
}
|
||||
63
vendor/github.com/status-im/status-go/protocol/common/shard/shard.go
generated
vendored
Normal file
63
vendor/github.com/status-im/status-go/protocol/common/shard/shard.go
generated
vendored
Normal file
@@ -0,0 +1,63 @@
|
||||
package shard
|
||||
|
||||
import (
|
||||
wakuproto "github.com/waku-org/go-waku/waku/v2/protocol"
|
||||
|
||||
"github.com/status-im/status-go/protocol/protobuf"
|
||||
)
|
||||
|
||||
// Shard identifies a Waku static shard by cluster and shard index.
type Shard struct {
	Cluster uint16 `json:"cluster"`
	Index   uint16 `json:"index"`
}
|
||||
|
||||
// FromProtobuff converts a protobuf Shard to its domain form; a nil input
// yields nil. (The historical spelling of the name is kept for compatibility.)
func FromProtobuff(p *protobuf.Shard) *Shard {
	if p == nil {
		return nil
	}

	return &Shard{
		Cluster: uint16(p.Cluster),
		Index:   uint16(p.Index),
	}
}
|
||||
|
||||
func (s *Shard) Protobuffer() *protobuf.Shard {
|
||||
if s == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
return &protobuf.Shard{
|
||||
Cluster: int32(s.Cluster),
|
||||
Index: int32(s.Index),
|
||||
}
|
||||
}
|
||||
func (s *Shard) PubsubTopic() string {
|
||||
if s != nil {
|
||||
return wakuproto.NewStaticShardingPubsubTopic(s.Cluster, s.Index).String()
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
// DefaultNonProtectedPubsubTopic returns the pubsub topic of the default
// non-protected shard (MainStatusShardCluster / NonProtectedShardIndex).
func DefaultNonProtectedPubsubTopic() string {
	return (&Shard{
		Cluster: MainStatusShardCluster,
		Index:   NonProtectedShardIndex,
	}).PubsubTopic()
}

// Well-known shard values of the Status Waku network.
const MainStatusShardCluster = 16
const DefaultShardIndex = 32
const NonProtectedShardIndex = 64
const UndefinedShardValue = 0

// DefaultShardPubsubTopic returns the pubsub topic of
// MainStatusShardCluster / DefaultShardIndex.
func DefaultShardPubsubTopic() string {
	return wakuproto.NewStaticShardingPubsubTopic(MainStatusShardCluster, DefaultShardIndex).String()
}

// DefaultShard returns the default shard.
// NOTE(review): this uses NonProtectedShardIndex rather than
// DefaultShardIndex, so it does not match DefaultShardPubsubTopic — confirm
// the asymmetry is intentional.
func DefaultShard() *Shard {
	return &Shard{
		Cluster: MainStatusShardCluster,
		Index:   NonProtectedShardIndex,
	}
}
|
||||
12
vendor/github.com/status-im/status-go/protocol/common/timesource.go
generated
vendored
Normal file
12
vendor/github.com/status-im/status-go/protocol/common/timesource.go
generated
vendored
Normal file
@@ -0,0 +1,12 @@
|
||||
package common
|
||||
|
||||
// TimeSource provides a unified way of getting the current time.
// The intention is to always use a synchronized time source
// between all components of the protocol.
//
// This is required by Whisper and Waku protocols
// which rely on a fact that all peers
// have a synchronized time source.
type TimeSource interface {
	// GetCurrentTime returns the current time.
	GetCurrentTime() uint64
}
|
||||
42
vendor/github.com/status-im/status-go/protocol/communities/adaptors.go
generated
vendored
Normal file
42
vendor/github.com/status-im/status-go/protocol/communities/adaptors.go
generated
vendored
Normal file
@@ -0,0 +1,42 @@
|
||||
package communities
|
||||
|
||||
import (
|
||||
"github.com/status-im/status-go/protocol/protobuf"
|
||||
)
|
||||
|
||||
// ToSyncInstallationCommunityProtobuf builds the protobuf used to sync this
// community to another installation: the wrapped community description, the
// requests to join, the community settings and the control node.
func (o *Community) ToSyncInstallationCommunityProtobuf(clock uint64, communitySettings *CommunitySettings, syncControlNode *protobuf.SyncCommunityControlNode) (*protobuf.SyncInstallationCommunity, error) {
	wrappedCommunity, err := o.ToProtocolMessageBytes()
	if err != nil {
		return nil, err
	}

	var rtjs []*protobuf.SyncCommunityRequestsToJoin
	reqs := o.RequestsToJoin()
	for _, req := range reqs {
		rtjs = append(rtjs, req.ToSyncProtobuf())
	}

	// History archive support defaults to enabled when no explicit settings
	// are provided.
	settings := &protobuf.SyncCommunitySettings{
		Clock:                        clock,
		CommunityId:                  o.IDString(),
		HistoryArchiveSupportEnabled: true,
	}

	if communitySettings != nil {
		settings.HistoryArchiveSupportEnabled = communitySettings.HistoryArchiveSupportEnabled
	}

	return &protobuf.SyncInstallationCommunity{
		Clock:          clock,
		Id:             o.ID(),
		Description:    wrappedCommunity,
		Joined:         o.Joined(),
		JoinedAt:       o.JoinedAt(),
		Verified:       o.Verified(),
		Muted:          o.Muted(),
		RequestsToJoin: rtjs,
		Settings:       settings,
		ControlNode:    syncControlNode,
		LastOpenedAt:   o.LastOpenedAt(),
	}, nil
}
|
||||
154
vendor/github.com/status-im/status-go/protocol/communities/check_permissions_response.go
generated
vendored
Normal file
154
vendor/github.com/status-im/status-go/protocol/communities/check_permissions_response.go
generated
vendored
Normal file
@@ -0,0 +1,154 @@
|
||||
package communities
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"sort"
|
||||
|
||||
gethcommon "github.com/ethereum/go-ethereum/common"
|
||||
"github.com/status-im/status-go/protocol/protobuf"
|
||||
)
|
||||
|
||||
// CheckPermissionsResponse is the result of evaluating a set of community
// token permissions against a user's accounts.
type CheckPermissionsResponse struct {
	// Satisfied is true when at least one permission's criteria all pass.
	Satisfied bool `json:"satisfied"`
	// Permissions maps permission IDs to their evaluation results.
	Permissions map[string]*PermissionTokenCriteriaResult `json:"permissions"`
	// ValidCombinations lists the account/chain combinations that satisfied the check.
	ValidCombinations []*AccountChainIDsCombination `json:"validCombinations"`
	// NetworksNotSupported is true when the required networks are unavailable.
	NetworksNotSupported bool `json:"networksNotSupported"`
}

// CheckPermissionToJoinResponse is the join-specific alias of CheckPermissionsResponse.
type CheckPermissionToJoinResponse = CheckPermissionsResponse

// HighestRoleResponse reports whether a given joining role's criteria are
// satisfied, along with the evaluated criteria.
type HighestRoleResponse struct {
	Role      protobuf.CommunityTokenPermission_Type `json:"type"`
	Satisfied bool                                   `json:"satisfied"`
	Criteria  []*PermissionTokenCriteriaResult       `json:"criteria"`
}

// joiningRoleOrders ranks the joining roles from lowest (member) to highest
// (token owner); roles absent from the map are not joining roles.
var joiningRoleOrders = map[protobuf.CommunityTokenPermission_Type]int{
	protobuf.CommunityTokenPermission_BECOME_MEMBER:       1,
	protobuf.CommunityTokenPermission_BECOME_ADMIN:        2,
	protobuf.CommunityTokenPermission_BECOME_TOKEN_MASTER: 3,
	protobuf.CommunityTokenPermission_BECOME_TOKEN_OWNER:  4,
}

// ByRoleDesc sorts HighestRoleResponse entries from highest role to lowest.
type ByRoleDesc []*HighestRoleResponse

func (a ByRoleDesc) Len() int      { return len(a) }
func (a ByRoleDesc) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
func (a ByRoleDesc) Less(i, j int) bool {
	return joiningRoleOrders[a[i].Role] > joiningRoleOrders[a[j].Role]
}

// rolesAndHighestRole bundles the per-role results with the highest satisfied role.
type rolesAndHighestRole struct {
	Roles       []*HighestRoleResponse
	HighestRole *HighestRoleResponse
}
|
||||
|
||||
// calculateRolesAndHighestRole groups the permission results by joining role,
// marks a role satisfied when at least one of its permissions has all token
// requirements met, and selects the highest satisfied role.
func calculateRolesAndHighestRole(permissions map[string]*PermissionTokenCriteriaResult) *rolesAndHighestRole {
	item := &rolesAndHighestRole{}
	byRoleMap := make(map[protobuf.CommunityTokenPermission_Type]*HighestRoleResponse)
	for _, p := range permissions {
		// Skip permission types that are not joining roles.
		if joiningRoleOrders[p.Role] == 0 {
			continue
		}
		if byRoleMap[p.Role] == nil {
			byRoleMap[p.Role] = &HighestRoleResponse{
				Role: p.Role,
			}
		}

		// A permission is satisfied only if every token requirement passes.
		satisfied := true
		for _, tr := range p.TokenRequirements {
			if !tr.Satisfied {
				satisfied = false
				break
			}
		}

		if satisfied {
			byRoleMap[p.Role].Satisfied = true
			// we prepend, so satisfied criteria come first
			byRoleMap[p.Role].Criteria = append([]*PermissionTokenCriteriaResult{p}, byRoleMap[p.Role].Criteria...)
		} else {
			// we append then
			byRoleMap[p.Role].Criteria = append(byRoleMap[p.Role].Criteria, p)
		}
	}
	// With no member permission at all, membership is implicitly satisfied.
	if byRoleMap[protobuf.CommunityTokenPermission_BECOME_MEMBER] == nil {
		byRoleMap[protobuf.CommunityTokenPermission_BECOME_MEMBER] = &HighestRoleResponse{Satisfied: true, Role: protobuf.CommunityTokenPermission_BECOME_MEMBER}
	}
	for _, p := range byRoleMap {
		item.Roles = append(item.Roles, p)
	}

	// Highest role first; the first satisfied entry is the highest role.
	sort.Sort(ByRoleDesc(item.Roles))
	for _, r := range item.Roles {
		if r.Satisfied {
			item.HighestRole = r
			break
		}
	}
	return item
}
|
||||
|
||||
// MarshalJSON serializes the response together with derived fields: the
// recomputed Satisfied flag plus the per-role and highest-role breakdown.
func (c *CheckPermissionsResponse) MarshalJSON() ([]byte, error) {
	// Local alias type avoids recursing back into this MarshalJSON.
	type CheckPermissionsTypeAlias struct {
		Satisfied            bool                                      `json:"satisfied"`
		Permissions          map[string]*PermissionTokenCriteriaResult `json:"permissions"`
		ValidCombinations    []*AccountChainIDsCombination             `json:"validCombinations"`
		Roles                []*HighestRoleResponse                    `json:"roles"`
		HighestRole          *HighestRoleResponse                      `json:"highestRole"`
		NetworksNotSupported bool                                      `json:"networksNotSupported"`
	}
	// Recompute Satisfied from the current permissions before serializing.
	c.calculateSatisfied()
	item := &CheckPermissionsTypeAlias{
		Satisfied:            c.Satisfied,
		Permissions:          c.Permissions,
		ValidCombinations:    c.ValidCombinations,
		NetworksNotSupported: c.NetworksNotSupported,
	}
	rolesAndHighestRole := calculateRolesAndHighestRole(c.Permissions)

	item.Roles = rolesAndHighestRole.Roles
	item.HighestRole = rolesAndHighestRole.HighestRole
	return json.Marshal(item)
}
|
||||
|
||||
type TokenRequirementResponse struct {
|
||||
Satisfied bool `json:"satisfied"`
|
||||
TokenCriteria *protobuf.TokenCriteria `json:"criteria"`
|
||||
}
|
||||
|
||||
type PermissionTokenCriteriaResult struct {
|
||||
Role protobuf.CommunityTokenPermission_Type `json:"roles"`
|
||||
TokenRequirements []TokenRequirementResponse `json:"tokenRequirement"`
|
||||
Criteria []bool `json:"criteria"`
|
||||
}
|
||||
|
||||
type AccountChainIDsCombination struct {
|
||||
Address gethcommon.Address `json:"address"`
|
||||
ChainIDs []uint64 `json:"chainIds"`
|
||||
}
|
||||
|
||||
func (c *CheckPermissionsResponse) calculateSatisfied() {
|
||||
if len(c.Permissions) == 0 {
|
||||
c.Satisfied = true
|
||||
return
|
||||
}
|
||||
|
||||
c.Satisfied = false
|
||||
for _, p := range c.Permissions {
|
||||
satisfied := true
|
||||
for _, criteria := range p.Criteria {
|
||||
if !criteria {
|
||||
satisfied = false
|
||||
break
|
||||
}
|
||||
}
|
||||
if satisfied {
|
||||
c.Satisfied = true
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
2428
vendor/github.com/status-im/status-go/protocol/communities/community.go
generated
vendored
Normal file
2428
vendor/github.com/status-im/status-go/protocol/communities/community.go
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
543
vendor/github.com/status-im/status-go/protocol/communities/community_categories.go
generated
vendored
Normal file
543
vendor/github.com/status-im/status-go/protocol/communities/community_categories.go
generated
vendored
Normal file
@@ -0,0 +1,543 @@
|
||||
package communities
|
||||
|
||||
import (
|
||||
"sort"
|
||||
|
||||
"github.com/status-im/status-go/protocol/protobuf"
|
||||
)
|
||||
|
||||
func (o *Community) ChatsByCategoryID(categoryID string) []string {
|
||||
o.mutex.Lock()
|
||||
defer o.mutex.Unlock()
|
||||
var chatIDs []string
|
||||
if o.config == nil || o.config.CommunityDescription == nil {
|
||||
return chatIDs
|
||||
}
|
||||
|
||||
for chatID, chat := range o.config.CommunityDescription.Chats {
|
||||
if chat.CategoryId == categoryID {
|
||||
chatIDs = append(chatIDs, chatID)
|
||||
}
|
||||
}
|
||||
return chatIDs
|
||||
}
|
||||
|
||||
func (o *Community) CommunityChatsIDs() []string {
|
||||
o.mutex.Lock()
|
||||
defer o.mutex.Unlock()
|
||||
var chatIDs []string
|
||||
if o.config == nil || o.config.CommunityDescription == nil {
|
||||
return chatIDs
|
||||
}
|
||||
|
||||
for chatID := range o.config.CommunityDescription.Chats {
|
||||
chatIDs = append(chatIDs, chatID)
|
||||
}
|
||||
return chatIDs
|
||||
}
|
||||
|
||||
func (o *Community) CreateCategory(categoryID string, categoryName string, chatIDs []string) (*CommunityChanges, error) {
|
||||
o.mutex.Lock()
|
||||
defer o.mutex.Unlock()
|
||||
|
||||
if !(o.IsControlNode() || o.hasPermissionToSendCommunityEvent(protobuf.CommunityEvent_COMMUNITY_CATEGORY_CREATE)) {
|
||||
return nil, ErrNotAuthorized
|
||||
}
|
||||
|
||||
changes, err := o.createCategory(categoryID, categoryName, chatIDs)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
changes.CategoriesAdded[categoryID] = o.config.CommunityDescription.Categories[categoryID]
|
||||
for i, cid := range chatIDs {
|
||||
changes.ChatsModified[cid] = &CommunityChatChanges{
|
||||
MembersAdded: make(map[string]*protobuf.CommunityMember),
|
||||
MembersRemoved: make(map[string]*protobuf.CommunityMember),
|
||||
CategoryModified: categoryID,
|
||||
PositionModified: i,
|
||||
}
|
||||
}
|
||||
|
||||
if o.IsControlNode() {
|
||||
o.increaseClock()
|
||||
} else {
|
||||
err := o.addNewCommunityEvent(o.ToCreateCategoryCommunityEvent(categoryID, categoryName, chatIDs))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
return changes, nil
|
||||
}
|
||||
|
||||
func (o *Community) EditCategory(categoryID string, categoryName string, chatIDs []string) (*CommunityChanges, error) {
|
||||
o.mutex.Lock()
|
||||
defer o.mutex.Unlock()
|
||||
|
||||
if !(o.IsControlNode() || o.hasPermissionToSendCommunityEvent(protobuf.CommunityEvent_COMMUNITY_CATEGORY_EDIT)) {
|
||||
return nil, ErrNotAuthorized
|
||||
}
|
||||
|
||||
changes, err := o.editCategory(categoryID, categoryName, chatIDs)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
changes.CategoriesModified[categoryID] = o.config.CommunityDescription.Categories[categoryID]
|
||||
for i, cid := range chatIDs {
|
||||
changes.ChatsModified[cid] = &CommunityChatChanges{
|
||||
MembersAdded: make(map[string]*protobuf.CommunityMember),
|
||||
MembersRemoved: make(map[string]*protobuf.CommunityMember),
|
||||
CategoryModified: categoryID,
|
||||
PositionModified: i,
|
||||
}
|
||||
}
|
||||
|
||||
if o.IsControlNode() {
|
||||
o.increaseClock()
|
||||
} else {
|
||||
err := o.addNewCommunityEvent(o.ToEditCategoryCommunityEvent(categoryID, categoryName, chatIDs))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
return changes, nil
|
||||
}
|
||||
|
||||
func (o *Community) ReorderCategories(categoryID string, newPosition int) (*CommunityChanges, error) {
|
||||
o.mutex.Lock()
|
||||
defer o.mutex.Unlock()
|
||||
|
||||
if !(o.IsControlNode() || o.hasPermissionToSendCommunityEvent(protobuf.CommunityEvent_COMMUNITY_CATEGORY_REORDER)) {
|
||||
return nil, ErrNotAuthorized
|
||||
}
|
||||
|
||||
changes, err := o.reorderCategories(categoryID, newPosition)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if o.IsControlNode() {
|
||||
o.increaseClock()
|
||||
} else {
|
||||
err := o.addNewCommunityEvent(o.ToReorderCategoryCommunityEvent(categoryID, newPosition))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
return changes, nil
|
||||
}
|
||||
|
||||
func (o *Community) setModifiedCategories(changes *CommunityChanges, s sortSlice) {
|
||||
sort.Sort(s)
|
||||
for i, catSortHelper := range s {
|
||||
if o.config.CommunityDescription.Categories[catSortHelper.catID].Position != int32(i) {
|
||||
o.config.CommunityDescription.Categories[catSortHelper.catID].Position = int32(i)
|
||||
changes.CategoriesModified[catSortHelper.catID] = o.config.CommunityDescription.Categories[catSortHelper.catID]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (o *Community) ReorderChat(categoryID string, chatID string, newPosition int) (*CommunityChanges, error) {
|
||||
o.mutex.Lock()
|
||||
defer o.mutex.Unlock()
|
||||
|
||||
if !(o.IsControlNode() || o.hasPermissionToSendCommunityEvent(protobuf.CommunityEvent_COMMUNITY_CHANNEL_REORDER)) {
|
||||
return nil, ErrNotAuthorized
|
||||
}
|
||||
|
||||
changes, err := o.reorderChat(categoryID, chatID, newPosition)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if o.IsControlNode() {
|
||||
o.increaseClock()
|
||||
} else {
|
||||
err := o.addNewCommunityEvent(o.ToReorderChannelCommunityEvent(categoryID, chatID, newPosition))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
return changes, nil
|
||||
}
|
||||
|
||||
func (o *Community) SortCategoryChats(changes *CommunityChanges, categoryID string) {
|
||||
var catChats []string
|
||||
for k, c := range o.config.CommunityDescription.Chats {
|
||||
if c.CategoryId == categoryID {
|
||||
catChats = append(catChats, k)
|
||||
}
|
||||
}
|
||||
|
||||
sortedChats := make(sortSlice, 0, len(catChats))
|
||||
for _, k := range catChats {
|
||||
sortedChats = append(sortedChats, sorterHelperIdx{
|
||||
pos: o.config.CommunityDescription.Chats[k].Position,
|
||||
chatID: k,
|
||||
})
|
||||
}
|
||||
|
||||
sort.Sort(sortedChats)
|
||||
|
||||
for i, chatSortHelper := range sortedChats {
|
||||
if o.config.CommunityDescription.Chats[chatSortHelper.chatID].Position != int32(i) {
|
||||
o.config.CommunityDescription.Chats[chatSortHelper.chatID].Position = int32(i)
|
||||
if changes.ChatsModified[chatSortHelper.chatID] != nil {
|
||||
changes.ChatsModified[chatSortHelper.chatID].PositionModified = i
|
||||
} else {
|
||||
changes.ChatsModified[chatSortHelper.chatID] = &CommunityChatChanges{
|
||||
PositionModified: i,
|
||||
MembersAdded: make(map[string]*protobuf.CommunityMember),
|
||||
MembersRemoved: make(map[string]*protobuf.CommunityMember),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (o *Community) insertAndSort(changes *CommunityChanges, oldCategoryID string, categoryID string, chatID string, chat *protobuf.CommunityChat, newPosition int) {
|
||||
// We sort the chats here because maps are not guaranteed to keep order
|
||||
var catChats []string
|
||||
sortedChats := make(sortSlice, 0, len(o.config.CommunityDescription.Chats))
|
||||
for k, v := range o.config.CommunityDescription.Chats {
|
||||
sortedChats = append(sortedChats, sorterHelperIdx{
|
||||
pos: v.Position,
|
||||
chatID: k,
|
||||
})
|
||||
}
|
||||
sort.Sort(sortedChats)
|
||||
for _, k := range sortedChats {
|
||||
if o.config.CommunityDescription.Chats[k.chatID].CategoryId == categoryID {
|
||||
catChats = append(catChats, k.chatID)
|
||||
}
|
||||
}
|
||||
|
||||
if newPosition > 0 && newPosition >= len(catChats) {
|
||||
newPosition = len(catChats) - 1
|
||||
} else if newPosition < 0 {
|
||||
newPosition = 0
|
||||
}
|
||||
|
||||
decrease := false
|
||||
if chat.Position > int32(newPosition) {
|
||||
decrease = true
|
||||
}
|
||||
|
||||
for k, v := range o.config.CommunityDescription.Chats {
|
||||
if k != chatID && newPosition == int(v.Position) && v.CategoryId == categoryID {
|
||||
if oldCategoryID == categoryID {
|
||||
if decrease {
|
||||
v.Position++
|
||||
} else {
|
||||
v.Position--
|
||||
}
|
||||
} else {
|
||||
v.Position++
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
idx := -1
|
||||
currChatID := ""
|
||||
var sortedChatIDs []string
|
||||
for i, k := range catChats {
|
||||
if o.config.CommunityDescription.Chats[k] != chat && ((decrease && o.config.CommunityDescription.Chats[k].Position < int32(newPosition)) || (!decrease && o.config.CommunityDescription.Chats[k].Position <= int32(newPosition))) {
|
||||
sortedChatIDs = append(sortedChatIDs, k)
|
||||
} else {
|
||||
if o.config.CommunityDescription.Chats[k] == chat {
|
||||
idx = i
|
||||
currChatID = k
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
sortedChatIDs = append(sortedChatIDs, currChatID)
|
||||
|
||||
for i, k := range catChats {
|
||||
if i == idx || (decrease && o.config.CommunityDescription.Chats[k].Position < int32(newPosition)) || (!decrease && o.config.CommunityDescription.Chats[k].Position <= int32(newPosition)) {
|
||||
continue
|
||||
}
|
||||
sortedChatIDs = append(sortedChatIDs, k)
|
||||
}
|
||||
|
||||
for i, sortedChatID := range sortedChatIDs {
|
||||
if o.config.CommunityDescription.Chats[sortedChatID].Position != int32(i) {
|
||||
o.config.CommunityDescription.Chats[sortedChatID].Position = int32(i)
|
||||
if changes.ChatsModified[sortedChatID] != nil {
|
||||
changes.ChatsModified[sortedChatID].PositionModified = i
|
||||
} else {
|
||||
changes.ChatsModified[sortedChatID] = &CommunityChatChanges{
|
||||
MembersAdded: make(map[string]*protobuf.CommunityMember),
|
||||
MembersRemoved: make(map[string]*protobuf.CommunityMember),
|
||||
PositionModified: i,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (o *Community) getCategoryChatCount(categoryID string) int {
|
||||
result := 0
|
||||
for _, chat := range o.config.CommunityDescription.Chats {
|
||||
if chat.CategoryId == categoryID {
|
||||
result = result + 1
|
||||
}
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
func (o *Community) DeleteCategory(categoryID string) (*CommunityChanges, error) {
|
||||
o.mutex.Lock()
|
||||
defer o.mutex.Unlock()
|
||||
|
||||
if !(o.IsControlNode() || o.hasPermissionToSendCommunityEvent(protobuf.CommunityEvent_COMMUNITY_CATEGORY_DELETE)) {
|
||||
return nil, ErrNotAuthorized
|
||||
}
|
||||
|
||||
changes, err := o.deleteCategory(categoryID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if o.IsControlNode() {
|
||||
o.increaseClock()
|
||||
} else {
|
||||
err := o.addNewCommunityEvent(o.ToDeleteCategoryCommunityEvent(categoryID))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
return changes, nil
|
||||
}
|
||||
|
||||
func (o *Community) createCategory(categoryID string, categoryName string, chatIDs []string) (*CommunityChanges, error) {
|
||||
if o.config.CommunityDescription.Categories == nil {
|
||||
o.config.CommunityDescription.Categories = make(map[string]*protobuf.CommunityCategory)
|
||||
}
|
||||
if _, ok := o.config.CommunityDescription.Categories[categoryID]; ok {
|
||||
return nil, ErrCategoryAlreadyExists
|
||||
}
|
||||
|
||||
for _, cid := range chatIDs {
|
||||
c, exists := o.config.CommunityDescription.Chats[cid]
|
||||
if !exists {
|
||||
return nil, ErrChatNotFound
|
||||
}
|
||||
|
||||
if exists && c.CategoryId != categoryID && c.CategoryId != "" {
|
||||
return nil, ErrChatAlreadyAssigned
|
||||
}
|
||||
}
|
||||
|
||||
changes := o.emptyCommunityChanges()
|
||||
|
||||
o.config.CommunityDescription.Categories[categoryID] = &protobuf.CommunityCategory{
|
||||
CategoryId: categoryID,
|
||||
Name: categoryName,
|
||||
Position: int32(len(o.config.CommunityDescription.Categories)),
|
||||
}
|
||||
|
||||
for i, cid := range chatIDs {
|
||||
o.config.CommunityDescription.Chats[cid].CategoryId = categoryID
|
||||
o.config.CommunityDescription.Chats[cid].Position = int32(i)
|
||||
}
|
||||
|
||||
o.SortCategoryChats(changes, "")
|
||||
|
||||
return changes, nil
|
||||
}
|
||||
|
||||
func (o *Community) editCategory(categoryID string, categoryName string, chatIDs []string) (*CommunityChanges, error) {
|
||||
if o.config.CommunityDescription.Categories == nil {
|
||||
o.config.CommunityDescription.Categories = make(map[string]*protobuf.CommunityCategory)
|
||||
}
|
||||
if _, ok := o.config.CommunityDescription.Categories[categoryID]; !ok {
|
||||
return nil, ErrCategoryNotFound
|
||||
}
|
||||
|
||||
for _, cid := range chatIDs {
|
||||
c, exists := o.config.CommunityDescription.Chats[cid]
|
||||
if !exists {
|
||||
return nil, ErrChatNotFound
|
||||
}
|
||||
|
||||
if exists && c.CategoryId != categoryID && c.CategoryId != "" {
|
||||
return nil, ErrChatAlreadyAssigned
|
||||
}
|
||||
}
|
||||
|
||||
changes := o.emptyCommunityChanges()
|
||||
|
||||
emptyCatLen := o.getCategoryChatCount("")
|
||||
|
||||
// remove any chat that might have been assigned before and now it's not part of the category
|
||||
var chatsToRemove []string
|
||||
for k, chat := range o.config.CommunityDescription.Chats {
|
||||
if chat.CategoryId == categoryID {
|
||||
found := false
|
||||
for _, c := range chatIDs {
|
||||
if k == c {
|
||||
found = true
|
||||
}
|
||||
}
|
||||
if !found {
|
||||
chat.CategoryId = ""
|
||||
chatsToRemove = append(chatsToRemove, k)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
o.config.CommunityDescription.Categories[categoryID].Name = categoryName
|
||||
|
||||
for i, cid := range chatIDs {
|
||||
o.config.CommunityDescription.Chats[cid].CategoryId = categoryID
|
||||
o.config.CommunityDescription.Chats[cid].Position = int32(i)
|
||||
}
|
||||
|
||||
for i, cid := range chatsToRemove {
|
||||
o.config.CommunityDescription.Chats[cid].Position = int32(emptyCatLen + i)
|
||||
changes.ChatsModified[cid] = &CommunityChatChanges{
|
||||
MembersAdded: make(map[string]*protobuf.CommunityMember),
|
||||
MembersRemoved: make(map[string]*protobuf.CommunityMember),
|
||||
CategoryModified: "",
|
||||
PositionModified: int(o.config.CommunityDescription.Chats[cid].Position),
|
||||
}
|
||||
}
|
||||
|
||||
o.SortCategoryChats(changes, "")
|
||||
|
||||
return changes, nil
|
||||
}
|
||||
|
||||
func (o *Community) deleteCategory(categoryID string) (*CommunityChanges, error) {
|
||||
if _, exists := o.config.CommunityDescription.Categories[categoryID]; !exists {
|
||||
return nil, ErrCategoryNotFound
|
||||
}
|
||||
|
||||
changes := o.emptyCommunityChanges()
|
||||
|
||||
emptyCategoryChatCount := o.getCategoryChatCount("")
|
||||
i := 0
|
||||
for _, chat := range o.config.CommunityDescription.Chats {
|
||||
if chat.CategoryId == categoryID {
|
||||
i++
|
||||
chat.CategoryId = ""
|
||||
chat.Position = int32(emptyCategoryChatCount + i)
|
||||
}
|
||||
}
|
||||
|
||||
o.SortCategoryChats(changes, "")
|
||||
|
||||
delete(o.config.CommunityDescription.Categories, categoryID)
|
||||
|
||||
changes.CategoriesRemoved = append(changes.CategoriesRemoved, categoryID)
|
||||
|
||||
// Reorder
|
||||
s := make(sortSlice, 0, len(o.config.CommunityDescription.Categories))
|
||||
for _, cat := range o.config.CommunityDescription.Categories {
|
||||
s = append(s, sorterHelperIdx{
|
||||
pos: cat.Position,
|
||||
catID: cat.CategoryId,
|
||||
})
|
||||
}
|
||||
|
||||
o.setModifiedCategories(changes, s)
|
||||
|
||||
return changes, nil
|
||||
}
|
||||
|
||||
func (o *Community) reorderCategories(categoryID string, newPosition int) (*CommunityChanges, error) {
|
||||
if _, exists := o.config.CommunityDescription.Categories[categoryID]; !exists {
|
||||
return nil, ErrCategoryNotFound
|
||||
}
|
||||
|
||||
if newPosition > 0 && newPosition >= len(o.config.CommunityDescription.Categories) {
|
||||
newPosition = len(o.config.CommunityDescription.Categories) - 1
|
||||
} else if newPosition < 0 {
|
||||
newPosition = 0
|
||||
}
|
||||
|
||||
category := o.config.CommunityDescription.Categories[categoryID]
|
||||
if category.Position == int32(newPosition) {
|
||||
return nil, ErrNoChangeInPosition
|
||||
}
|
||||
|
||||
decrease := false
|
||||
if category.Position > int32(newPosition) {
|
||||
decrease = true
|
||||
}
|
||||
|
||||
// Sorting the categories because maps are not guaranteed to keep order
|
||||
s := make(sortSlice, 0, len(o.config.CommunityDescription.Categories))
|
||||
for k, v := range o.config.CommunityDescription.Categories {
|
||||
s = append(s, sorterHelperIdx{
|
||||
pos: v.Position,
|
||||
catID: k,
|
||||
})
|
||||
}
|
||||
sort.Sort(s)
|
||||
var communityCategories []*protobuf.CommunityCategory
|
||||
for _, currCat := range s {
|
||||
communityCategories = append(communityCategories, o.config.CommunityDescription.Categories[currCat.catID])
|
||||
}
|
||||
|
||||
var sortedCategoryIDs []string
|
||||
for _, v := range communityCategories {
|
||||
if v != category && ((decrease && v.Position < int32(newPosition)) || (!decrease && v.Position <= int32(newPosition))) {
|
||||
sortedCategoryIDs = append(sortedCategoryIDs, v.CategoryId)
|
||||
}
|
||||
}
|
||||
|
||||
sortedCategoryIDs = append(sortedCategoryIDs, categoryID)
|
||||
|
||||
for _, v := range communityCategories {
|
||||
if v.CategoryId == categoryID || (decrease && v.Position < int32(newPosition)) || (!decrease && v.Position <= int32(newPosition)) {
|
||||
continue
|
||||
}
|
||||
sortedCategoryIDs = append(sortedCategoryIDs, v.CategoryId)
|
||||
}
|
||||
|
||||
s = make(sortSlice, 0, len(o.config.CommunityDescription.Categories))
|
||||
for i, k := range sortedCategoryIDs {
|
||||
s = append(s, sorterHelperIdx{
|
||||
pos: int32(i),
|
||||
catID: k,
|
||||
})
|
||||
}
|
||||
|
||||
changes := o.emptyCommunityChanges()
|
||||
|
||||
o.setModifiedCategories(changes, s)
|
||||
|
||||
return changes, nil
|
||||
}
|
||||
|
||||
func (o *Community) reorderChat(categoryID string, chatID string, newPosition int) (*CommunityChanges, error) {
|
||||
if categoryID != "" {
|
||||
if _, exists := o.config.CommunityDescription.Categories[categoryID]; !exists {
|
||||
return nil, ErrCategoryNotFound
|
||||
}
|
||||
}
|
||||
|
||||
var chat *protobuf.CommunityChat
|
||||
var exists bool
|
||||
if chat, exists = o.config.CommunityDescription.Chats[chatID]; !exists {
|
||||
return nil, ErrChatNotFound
|
||||
}
|
||||
|
||||
oldCategoryID := chat.CategoryId
|
||||
chat.CategoryId = categoryID
|
||||
|
||||
changes := o.emptyCommunityChanges()
|
||||
|
||||
o.SortCategoryChats(changes, oldCategoryID)
|
||||
o.insertAndSort(changes, oldCategoryID, categoryID, chatID, chat, newPosition)
|
||||
|
||||
return changes, nil
|
||||
}
|
||||
270
vendor/github.com/status-im/status-go/protocol/communities/community_changes.go
generated
vendored
Normal file
270
vendor/github.com/status-im/status-go/protocol/communities/community_changes.go
generated
vendored
Normal file
@@ -0,0 +1,270 @@
|
||||
package communities
|
||||
|
||||
import (
|
||||
"crypto/ecdsa"
|
||||
|
||||
"github.com/status-im/status-go/protocol/protobuf"
|
||||
)
|
||||
|
||||
type CommunityChatChanges struct {
|
||||
ChatModified *protobuf.CommunityChat
|
||||
MembersAdded map[string]*protobuf.CommunityMember
|
||||
MembersRemoved map[string]*protobuf.CommunityMember
|
||||
CategoryModified string
|
||||
PositionModified int
|
||||
FirstMessageTimestampModified uint32
|
||||
}
|
||||
|
||||
type CommunityChanges struct {
|
||||
Community *Community `json:"community"`
|
||||
|
||||
ControlNodeChanged *ecdsa.PublicKey `json:"controlNodeChanged"`
|
||||
|
||||
MembersAdded map[string]*protobuf.CommunityMember `json:"membersAdded"`
|
||||
MembersRemoved map[string]*protobuf.CommunityMember `json:"membersRemoved"`
|
||||
|
||||
TokenPermissionsAdded map[string]*CommunityTokenPermission `json:"tokenPermissionsAdded"`
|
||||
TokenPermissionsModified map[string]*CommunityTokenPermission `json:"tokenPermissionsModified"`
|
||||
TokenPermissionsRemoved map[string]*CommunityTokenPermission `json:"tokenPermissionsRemoved"`
|
||||
|
||||
ChatsRemoved map[string]*protobuf.CommunityChat `json:"chatsRemoved"`
|
||||
ChatsAdded map[string]*protobuf.CommunityChat `json:"chatsAdded"`
|
||||
ChatsModified map[string]*CommunityChatChanges `json:"chatsModified"`
|
||||
|
||||
CategoriesRemoved []string `json:"categoriesRemoved"`
|
||||
CategoriesAdded map[string]*protobuf.CommunityCategory `json:"categoriesAdded"`
|
||||
CategoriesModified map[string]*protobuf.CommunityCategory `json:"categoriesModified"`
|
||||
|
||||
MemberWalletsRemoved []string `json:"memberWalletsRemoved"`
|
||||
MemberWalletsAdded map[string][]*protobuf.RevealedAccount `json:"memberWalletsAdded"`
|
||||
|
||||
// ShouldMemberJoin indicates whether the user should join this community
|
||||
// automatically
|
||||
ShouldMemberJoin bool `json:"memberAdded"`
|
||||
|
||||
// MemberKicked indicates whether the user has been kicked out
|
||||
MemberKicked bool `json:"memberRemoved"`
|
||||
}
|
||||
|
||||
func EmptyCommunityChanges() *CommunityChanges {
|
||||
return &CommunityChanges{
|
||||
MembersAdded: make(map[string]*protobuf.CommunityMember),
|
||||
MembersRemoved: make(map[string]*protobuf.CommunityMember),
|
||||
|
||||
TokenPermissionsAdded: make(map[string]*CommunityTokenPermission),
|
||||
TokenPermissionsModified: make(map[string]*CommunityTokenPermission),
|
||||
TokenPermissionsRemoved: make(map[string]*CommunityTokenPermission),
|
||||
|
||||
ChatsRemoved: make(map[string]*protobuf.CommunityChat),
|
||||
ChatsAdded: make(map[string]*protobuf.CommunityChat),
|
||||
ChatsModified: make(map[string]*CommunityChatChanges),
|
||||
|
||||
CategoriesRemoved: []string{},
|
||||
CategoriesAdded: make(map[string]*protobuf.CommunityCategory),
|
||||
CategoriesModified: make(map[string]*protobuf.CommunityCategory),
|
||||
|
||||
MemberWalletsRemoved: []string{},
|
||||
MemberWalletsAdded: make(map[string][]*protobuf.RevealedAccount),
|
||||
}
|
||||
}
|
||||
|
||||
func (c *CommunityChanges) HasNewMember(identity string) bool {
|
||||
if len(c.MembersAdded) == 0 {
|
||||
return false
|
||||
}
|
||||
_, ok := c.MembersAdded[identity]
|
||||
return ok
|
||||
}
|
||||
|
||||
func (c *CommunityChanges) HasMemberLeft(identity string) bool {
|
||||
if len(c.MembersRemoved) == 0 {
|
||||
return false
|
||||
}
|
||||
_, ok := c.MembersRemoved[identity]
|
||||
return ok
|
||||
}
|
||||
|
||||
func EvaluateCommunityChanges(origin, modified *Community) *CommunityChanges {
|
||||
changes := evaluateCommunityChangesByDescription(origin.Description(), modified.Description())
|
||||
|
||||
if origin.ControlNode() != nil && !modified.ControlNode().Equal(origin.ControlNode()) {
|
||||
changes.ControlNodeChanged = modified.ControlNode()
|
||||
}
|
||||
|
||||
originTokenPermissions := origin.tokenPermissions()
|
||||
modifiedTokenPermissions := modified.tokenPermissions()
|
||||
|
||||
// Check for modified or removed token permissions
|
||||
for id, originPermission := range originTokenPermissions {
|
||||
if modifiedPermission := modifiedTokenPermissions[id]; modifiedPermission != nil {
|
||||
if !modifiedPermission.Equals(originPermission) {
|
||||
changes.TokenPermissionsModified[id] = modifiedPermission
|
||||
}
|
||||
} else {
|
||||
changes.TokenPermissionsRemoved[id] = originPermission
|
||||
}
|
||||
}
|
||||
|
||||
// Check for added token permissions
|
||||
for id, permission := range modifiedTokenPermissions {
|
||||
if _, ok := originTokenPermissions[id]; !ok {
|
||||
changes.TokenPermissionsAdded[id] = permission
|
||||
}
|
||||
}
|
||||
|
||||
changes.Community = modified
|
||||
return changes
|
||||
}
|
||||
|
||||
func evaluateCommunityChangesByDescription(origin, modified *protobuf.CommunityDescription) *CommunityChanges {
|
||||
changes := EmptyCommunityChanges()
|
||||
|
||||
// Check for new members at the org level
|
||||
for pk, member := range modified.Members {
|
||||
if _, ok := origin.Members[pk]; !ok {
|
||||
if changes.MembersAdded == nil {
|
||||
changes.MembersAdded = make(map[string]*protobuf.CommunityMember)
|
||||
}
|
||||
changes.MembersAdded[pk] = member
|
||||
}
|
||||
}
|
||||
|
||||
// Check for removed members at the org level
|
||||
for pk, member := range origin.Members {
|
||||
if _, ok := modified.Members[pk]; !ok {
|
||||
if changes.MembersRemoved == nil {
|
||||
changes.MembersRemoved = make(map[string]*protobuf.CommunityMember)
|
||||
}
|
||||
changes.MembersRemoved[pk] = member
|
||||
}
|
||||
}
|
||||
|
||||
// check for removed chats
|
||||
for chatID, chat := range origin.Chats {
|
||||
if modified.Chats == nil {
|
||||
modified.Chats = make(map[string]*protobuf.CommunityChat)
|
||||
}
|
||||
if _, ok := modified.Chats[chatID]; !ok {
|
||||
if changes.ChatsRemoved == nil {
|
||||
changes.ChatsRemoved = make(map[string]*protobuf.CommunityChat)
|
||||
}
|
||||
|
||||
changes.ChatsRemoved[chatID] = chat
|
||||
}
|
||||
}
|
||||
|
||||
for chatID, chat := range modified.Chats {
|
||||
if origin.Chats == nil {
|
||||
origin.Chats = make(map[string]*protobuf.CommunityChat)
|
||||
}
|
||||
|
||||
if _, ok := origin.Chats[chatID]; !ok {
|
||||
if changes.ChatsAdded == nil {
|
||||
changes.ChatsAdded = make(map[string]*protobuf.CommunityChat)
|
||||
}
|
||||
|
||||
changes.ChatsAdded[chatID] = chat
|
||||
} else {
|
||||
// Check for members added
|
||||
for pk, member := range modified.Chats[chatID].Members {
|
||||
if _, ok := origin.Chats[chatID].Members[pk]; !ok {
|
||||
if changes.ChatsModified[chatID] == nil {
|
||||
changes.ChatsModified[chatID] = &CommunityChatChanges{
|
||||
MembersAdded: make(map[string]*protobuf.CommunityMember),
|
||||
MembersRemoved: make(map[string]*protobuf.CommunityMember),
|
||||
}
|
||||
}
|
||||
|
||||
changes.ChatsModified[chatID].MembersAdded[pk] = member
|
||||
}
|
||||
}
|
||||
|
||||
// check for members removed
|
||||
for pk, member := range origin.Chats[chatID].Members {
|
||||
if _, ok := modified.Chats[chatID].Members[pk]; !ok {
|
||||
if changes.ChatsModified[chatID] == nil {
|
||||
changes.ChatsModified[chatID] = &CommunityChatChanges{
|
||||
MembersAdded: make(map[string]*protobuf.CommunityMember),
|
||||
MembersRemoved: make(map[string]*protobuf.CommunityMember),
|
||||
}
|
||||
}
|
||||
|
||||
changes.ChatsModified[chatID].MembersRemoved[pk] = member
|
||||
}
|
||||
}
|
||||
|
||||
// check if first message timestamp was modified
|
||||
if origin.Chats[chatID].Identity.FirstMessageTimestamp !=
|
||||
modified.Chats[chatID].Identity.FirstMessageTimestamp {
|
||||
if changes.ChatsModified[chatID] == nil {
|
||||
changes.ChatsModified[chatID] = &CommunityChatChanges{
|
||||
MembersAdded: make(map[string]*protobuf.CommunityMember),
|
||||
MembersRemoved: make(map[string]*protobuf.CommunityMember),
|
||||
}
|
||||
}
|
||||
changes.ChatsModified[chatID].FirstMessageTimestampModified = modified.Chats[chatID].Identity.FirstMessageTimestamp
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Check for categories that were removed
|
||||
for categoryID := range origin.Categories {
|
||||
if modified.Categories == nil {
|
||||
modified.Categories = make(map[string]*protobuf.CommunityCategory)
|
||||
}
|
||||
|
||||
if modified.Chats == nil {
|
||||
modified.Chats = make(map[string]*protobuf.CommunityChat)
|
||||
}
|
||||
|
||||
if _, ok := modified.Categories[categoryID]; !ok {
|
||||
changes.CategoriesRemoved = append(changes.CategoriesRemoved, categoryID)
|
||||
}
|
||||
|
||||
if origin.Chats == nil {
|
||||
origin.Chats = make(map[string]*protobuf.CommunityChat)
|
||||
}
|
||||
}
|
||||
|
||||
// Check for categories that were added
|
||||
for categoryID, category := range modified.Categories {
|
||||
if origin.Categories == nil {
|
||||
origin.Categories = make(map[string]*protobuf.CommunityCategory)
|
||||
}
|
||||
if _, ok := origin.Categories[categoryID]; !ok {
|
||||
if changes.CategoriesAdded == nil {
|
||||
changes.CategoriesAdded = make(map[string]*protobuf.CommunityCategory)
|
||||
}
|
||||
|
||||
changes.CategoriesAdded[categoryID] = category
|
||||
} else {
|
||||
if origin.Categories[categoryID].Name != category.Name || origin.Categories[categoryID].Position != category.Position {
|
||||
changes.CategoriesModified[categoryID] = category
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Check for chat categories that were modified
|
||||
for chatID, chat := range modified.Chats {
|
||||
if origin.Chats == nil {
|
||||
origin.Chats = make(map[string]*protobuf.CommunityChat)
|
||||
}
|
||||
|
||||
if _, ok := origin.Chats[chatID]; !ok {
|
||||
continue // It's a new chat
|
||||
}
|
||||
|
||||
if origin.Chats[chatID].CategoryId != chat.CategoryId {
|
||||
if changes.ChatsModified[chatID] == nil {
|
||||
changes.ChatsModified[chatID] = &CommunityChatChanges{
|
||||
MembersAdded: make(map[string]*protobuf.CommunityMember),
|
||||
MembersRemoved: make(map[string]*protobuf.CommunityMember),
|
||||
}
|
||||
}
|
||||
|
||||
changes.ChatsModified[chatID].CategoryModified = chat.CategoryId
|
||||
}
|
||||
}
|
||||
|
||||
return changes
|
||||
}
|
||||
111
vendor/github.com/status-im/status-go/protocol/communities/community_description_encryption.go
generated
vendored
Normal file
111
vendor/github.com/status-im/status-go/protocol/communities/community_description_encryption.go
generated
vendored
Normal file
@@ -0,0 +1,111 @@
|
||||
package communities
|
||||
|
||||
import (
|
||||
"github.com/golang/protobuf/proto"
|
||||
|
||||
"go.uber.org/zap"
|
||||
|
||||
"github.com/status-im/status-go/eth-node/types"
|
||||
"github.com/status-im/status-go/protocol/protobuf"
|
||||
)
|
||||
|
||||
type DescriptionEncryptor interface {
|
||||
encryptCommunityDescription(community *Community, d *protobuf.CommunityDescription) (string, []byte, error)
|
||||
encryptCommunityDescriptionChannel(community *Community, channelID string, d *protobuf.CommunityDescription) (string, []byte, error)
|
||||
decryptCommunityDescription(keyIDSeqNo string, d []byte) (*DecryptCommunityResponse, error)
|
||||
}
|
||||
|
||||
// Encrypts members and chats
|
||||
func encryptDescription(encryptor DescriptionEncryptor, community *Community, description *protobuf.CommunityDescription) error {
|
||||
description.PrivateData = make(map[string][]byte)
|
||||
|
||||
for channelID, channel := range description.Chats {
|
||||
if !community.channelEncrypted(channelID) {
|
||||
continue
|
||||
}
|
||||
|
||||
descriptionToEncrypt := &protobuf.CommunityDescription{
|
||||
Chats: map[string]*protobuf.CommunityChat{
|
||||
channelID: proto.Clone(channel).(*protobuf.CommunityChat),
|
||||
},
|
||||
}
|
||||
|
||||
keyIDSeqNo, encryptedDescription, err := encryptor.encryptCommunityDescriptionChannel(community, channelID, descriptionToEncrypt)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Set private data and cleanup unencrypted channel's members
|
||||
description.PrivateData[keyIDSeqNo] = encryptedDescription
|
||||
channel.Members = make(map[string]*protobuf.CommunityMember)
|
||||
}
|
||||
|
||||
if community.Encrypted() {
|
||||
descriptionToEncrypt := &protobuf.CommunityDescription{
|
||||
Members: description.Members,
|
||||
Chats: description.Chats,
|
||||
}
|
||||
|
||||
keyIDSeqNo, encryptedDescription, err := encryptor.encryptCommunityDescription(community, descriptionToEncrypt)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Set private data and cleanup unencrypted members and chats
|
||||
description.PrivateData[keyIDSeqNo] = encryptedDescription
|
||||
description.Members = make(map[string]*protobuf.CommunityMember)
|
||||
description.Chats = make(map[string]*protobuf.CommunityChat)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
type CommunityPrivateDataFailedToDecrypt struct {
|
||||
GroupID []byte
|
||||
KeyID []byte
|
||||
}
|
||||
|
||||
// Decrypts members and chats
|
||||
func decryptDescription(id types.HexBytes, encryptor DescriptionEncryptor, description *protobuf.CommunityDescription, logger *zap.Logger) ([]*CommunityPrivateDataFailedToDecrypt, error) {
|
||||
if len(description.PrivateData) == 0 {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
var failedToDecrypt []*CommunityPrivateDataFailedToDecrypt
|
||||
|
||||
for keyIDSeqNo, encryptedDescription := range description.PrivateData {
|
||||
decryptedDescriptionResponse, err := encryptor.decryptCommunityDescription(keyIDSeqNo, encryptedDescription)
|
||||
if decryptedDescriptionResponse != nil && !decryptedDescriptionResponse.Decrypted {
|
||||
failedToDecrypt = append(failedToDecrypt, &CommunityPrivateDataFailedToDecrypt{GroupID: id, KeyID: decryptedDescriptionResponse.KeyID})
|
||||
}
|
||||
if err != nil {
|
||||
// ignore error, try to decrypt next data
|
||||
logger.Debug("failed to decrypt community private data", zap.String("keyIDSeqNo", keyIDSeqNo), zap.Error(err))
|
||||
continue
|
||||
}
|
||||
decryptedDescription := decryptedDescriptionResponse.Description
|
||||
|
||||
for pk, member := range decryptedDescription.Members {
|
||||
if description.Members == nil {
|
||||
description.Members = make(map[string]*protobuf.CommunityMember)
|
||||
}
|
||||
description.Members[pk] = member
|
||||
}
|
||||
|
||||
for id, decryptedChannel := range decryptedDescription.Chats {
|
||||
if description.Chats == nil {
|
||||
description.Chats = make(map[string]*protobuf.CommunityChat)
|
||||
}
|
||||
|
||||
if channel := description.Chats[id]; channel != nil {
|
||||
if len(channel.Members) == 0 {
|
||||
channel.Members = decryptedChannel.Members
|
||||
}
|
||||
} else {
|
||||
description.Chats[id] = decryptedChannel
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return failedToDecrypt, nil
|
||||
}
|
||||
153
vendor/github.com/status-im/status-go/protocol/communities/community_encryption_key_action.go
generated
vendored
Normal file
153
vendor/github.com/status-im/status-go/protocol/communities/community_encryption_key_action.go
generated
vendored
Normal file
@@ -0,0 +1,153 @@
|
||||
package communities
|
||||
|
||||
import (
|
||||
"github.com/status-im/status-go/protocol/protobuf"
|
||||
)
|
||||
|
||||
type KeyDistributor interface {
|
||||
Generate(community *Community, keyActions *EncryptionKeyActions) error
|
||||
Distribute(community *Community, keyActions *EncryptionKeyActions) error
|
||||
}
|
||||
|
||||
type EncryptionKeyActionType int
|
||||
|
||||
const (
|
||||
EncryptionKeyNone EncryptionKeyActionType = iota
|
||||
EncryptionKeyAdd
|
||||
EncryptionKeyRemove
|
||||
EncryptionKeyRekey
|
||||
EncryptionKeySendToMembers
|
||||
)
|
||||
|
||||
type EncryptionKeyAction struct {
|
||||
ActionType EncryptionKeyActionType
|
||||
Members map[string]*protobuf.CommunityMember
|
||||
RemovedMembers map[string]*protobuf.CommunityMember
|
||||
}
|
||||
|
||||
type EncryptionKeyActions struct {
|
||||
// community-level encryption key action
|
||||
CommunityKeyAction EncryptionKeyAction
|
||||
|
||||
// channel-level encryption key actions
|
||||
ChannelKeysActions map[string]EncryptionKeyAction // key is: chatID
|
||||
}
|
||||
|
||||
func EvaluateCommunityEncryptionKeyActions(origin, modified *Community) *EncryptionKeyActions {
|
||||
if origin == nil {
|
||||
// `modified` is a new community, create empty `origin` community
|
||||
origin = &Community{
|
||||
config: &Config{
|
||||
ID: modified.config.ID,
|
||||
CommunityDescription: &protobuf.CommunityDescription{
|
||||
Members: map[string]*protobuf.CommunityMember{},
|
||||
Permissions: &protobuf.CommunityPermissions{},
|
||||
Identity: &protobuf.ChatIdentity{},
|
||||
Chats: map[string]*protobuf.CommunityChat{},
|
||||
Categories: map[string]*protobuf.CommunityCategory{},
|
||||
AdminSettings: &protobuf.CommunityAdminSettings{},
|
||||
TokenPermissions: map[string]*protobuf.CommunityTokenPermission{},
|
||||
CommunityTokensMetadata: []*protobuf.CommunityTokenMetadata{},
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
changes := EvaluateCommunityChanges(origin, modified)
|
||||
|
||||
result := &EncryptionKeyActions{
|
||||
CommunityKeyAction: *evaluateCommunityLevelEncryptionKeyAction(origin, modified, changes),
|
||||
ChannelKeysActions: *evaluateChannelLevelEncryptionKeyActions(origin, modified, changes),
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
func evaluateCommunityLevelEncryptionKeyAction(origin, modified *Community, changes *CommunityChanges) *EncryptionKeyAction {
|
||||
return evaluateEncryptionKeyAction(
|
||||
origin.Encrypted(),
|
||||
modified.Encrypted(),
|
||||
changes.ControlNodeChanged != nil,
|
||||
modified.config.CommunityDescription.Members,
|
||||
changes.MembersAdded,
|
||||
changes.MembersRemoved,
|
||||
)
|
||||
}
|
||||
|
||||
func evaluateChannelLevelEncryptionKeyActions(origin, modified *Community, changes *CommunityChanges) *map[string]EncryptionKeyAction {
|
||||
result := make(map[string]EncryptionKeyAction)
|
||||
|
||||
for channelID := range modified.config.CommunityDescription.Chats {
|
||||
membersAdded := make(map[string]*protobuf.CommunityMember)
|
||||
membersRemoved := make(map[string]*protobuf.CommunityMember)
|
||||
|
||||
chatChanges, ok := changes.ChatsModified[channelID]
|
||||
if ok {
|
||||
membersAdded = chatChanges.MembersAdded
|
||||
membersRemoved = chatChanges.MembersRemoved
|
||||
}
|
||||
|
||||
result[channelID] = *evaluateEncryptionKeyAction(
|
||||
origin.ChannelEncrypted(channelID),
|
||||
modified.ChannelEncrypted(channelID),
|
||||
changes.ControlNodeChanged != nil,
|
||||
modified.config.CommunityDescription.Chats[channelID].Members,
|
||||
membersAdded,
|
||||
membersRemoved,
|
||||
)
|
||||
}
|
||||
|
||||
return &result
|
||||
}
|
||||
|
||||
func evaluateEncryptionKeyAction(originEncrypted, modifiedEncrypted, controlNodeChanged bool,
|
||||
allMembers, membersAdded, membersRemoved map[string]*protobuf.CommunityMember) *EncryptionKeyAction {
|
||||
result := &EncryptionKeyAction{
|
||||
ActionType: EncryptionKeyNone,
|
||||
Members: map[string]*protobuf.CommunityMember{},
|
||||
}
|
||||
|
||||
copyMap := func(source map[string]*protobuf.CommunityMember) map[string]*protobuf.CommunityMember {
|
||||
to := make(map[string]*protobuf.CommunityMember)
|
||||
for pubKey, member := range source {
|
||||
to[pubKey] = member
|
||||
}
|
||||
return to
|
||||
}
|
||||
|
||||
// control node changed on closed community/channel
|
||||
if controlNodeChanged && modifiedEncrypted {
|
||||
result.ActionType = EncryptionKeyRekey
|
||||
result.Members = copyMap(allMembers)
|
||||
return result
|
||||
}
|
||||
|
||||
// encryption was just added
|
||||
if modifiedEncrypted && !originEncrypted {
|
||||
result.ActionType = EncryptionKeyAdd
|
||||
result.Members = copyMap(allMembers)
|
||||
return result
|
||||
}
|
||||
|
||||
// encryption was just removed
|
||||
if !modifiedEncrypted && originEncrypted {
|
||||
result.ActionType = EncryptionKeyRemove
|
||||
result.Members = copyMap(allMembers)
|
||||
return result
|
||||
}
|
||||
|
||||
// open community/channel does not require any actions
|
||||
if !modifiedEncrypted {
|
||||
return result
|
||||
}
|
||||
|
||||
if len(membersRemoved) > 0 {
|
||||
result.ActionType = EncryptionKeyRekey
|
||||
result.Members = copyMap(allMembers)
|
||||
result.RemovedMembers = copyMap(membersRemoved)
|
||||
} else if len(membersAdded) > 0 {
|
||||
result.ActionType = EncryptionKeySendToMembers
|
||||
result.Members = copyMap(membersAdded)
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
414
vendor/github.com/status-im/status-go/protocol/communities/community_event.go
generated
vendored
Normal file
414
vendor/github.com/status-im/status-go/protocol/communities/community_event.go
generated
vendored
Normal file
@@ -0,0 +1,414 @@
|
||||
package communities
|
||||
|
||||
import (
|
||||
"crypto/ecdsa"
|
||||
"errors"
|
||||
"time"
|
||||
|
||||
"github.com/golang/protobuf/proto"
|
||||
|
||||
utils "github.com/status-im/status-go/common"
|
||||
"github.com/status-im/status-go/protocol/common"
|
||||
"github.com/status-im/status-go/protocol/protobuf"
|
||||
)
|
||||
|
||||
var ErrInvalidCommunityEventClock = errors.New("clock for admin event message is outdated")
|
||||
|
||||
func (o *Community) ToCreateChannelCommunityEvent(channelID string, channel *protobuf.CommunityChat) *CommunityEvent {
|
||||
return &CommunityEvent{
|
||||
CommunityEventClock: o.NewCommunityEventClock(),
|
||||
Type: protobuf.CommunityEvent_COMMUNITY_CHANNEL_CREATE,
|
||||
ChannelData: &protobuf.ChannelData{
|
||||
ChannelId: channelID,
|
||||
Channel: channel,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func (o *Community) ToEditChannelCommunityEvent(channelID string, channel *protobuf.CommunityChat) *CommunityEvent {
|
||||
return &CommunityEvent{
|
||||
CommunityEventClock: o.NewCommunityEventClock(),
|
||||
Type: protobuf.CommunityEvent_COMMUNITY_CHANNEL_EDIT,
|
||||
ChannelData: &protobuf.ChannelData{
|
||||
ChannelId: channelID,
|
||||
Channel: channel,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func (o *Community) ToDeleteChannelCommunityEvent(channelID string) *CommunityEvent {
|
||||
return &CommunityEvent{
|
||||
CommunityEventClock: o.NewCommunityEventClock(),
|
||||
Type: protobuf.CommunityEvent_COMMUNITY_CHANNEL_DELETE,
|
||||
ChannelData: &protobuf.ChannelData{
|
||||
ChannelId: channelID,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func (o *Community) ToReorderChannelCommunityEvent(categoryID string, channelID string, position int) *CommunityEvent {
|
||||
return &CommunityEvent{
|
||||
CommunityEventClock: o.NewCommunityEventClock(),
|
||||
Type: protobuf.CommunityEvent_COMMUNITY_CHANNEL_REORDER,
|
||||
ChannelData: &protobuf.ChannelData{
|
||||
CategoryId: categoryID,
|
||||
ChannelId: channelID,
|
||||
Position: int32(position),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func (o *Community) ToCreateCategoryCommunityEvent(categoryID string, categoryName string, channelsIds []string) *CommunityEvent {
|
||||
return &CommunityEvent{
|
||||
CommunityEventClock: o.NewCommunityEventClock(),
|
||||
Type: protobuf.CommunityEvent_COMMUNITY_CATEGORY_CREATE,
|
||||
CategoryData: &protobuf.CategoryData{
|
||||
Name: categoryName,
|
||||
CategoryId: categoryID,
|
||||
ChannelsIds: channelsIds,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func (o *Community) ToEditCategoryCommunityEvent(categoryID string, categoryName string, channelsIds []string) *CommunityEvent {
|
||||
return &CommunityEvent{
|
||||
CommunityEventClock: o.NewCommunityEventClock(),
|
||||
Type: protobuf.CommunityEvent_COMMUNITY_CATEGORY_EDIT,
|
||||
CategoryData: &protobuf.CategoryData{
|
||||
Name: categoryName,
|
||||
CategoryId: categoryID,
|
||||
ChannelsIds: channelsIds,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func (o *Community) ToDeleteCategoryCommunityEvent(categoryID string) *CommunityEvent {
|
||||
return &CommunityEvent{
|
||||
CommunityEventClock: o.NewCommunityEventClock(),
|
||||
Type: protobuf.CommunityEvent_COMMUNITY_CATEGORY_DELETE,
|
||||
CategoryData: &protobuf.CategoryData{
|
||||
CategoryId: categoryID,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func (o *Community) ToReorderCategoryCommunityEvent(categoryID string, position int) *CommunityEvent {
|
||||
return &CommunityEvent{
|
||||
CommunityEventClock: o.NewCommunityEventClock(),
|
||||
Type: protobuf.CommunityEvent_COMMUNITY_CATEGORY_REORDER,
|
||||
CategoryData: &protobuf.CategoryData{
|
||||
CategoryId: categoryID,
|
||||
Position: int32(position),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func (o *Community) ToBanCommunityMemberCommunityEvent(pubkey string) *CommunityEvent {
|
||||
return &CommunityEvent{
|
||||
CommunityEventClock: o.NewCommunityEventClock(),
|
||||
Type: protobuf.CommunityEvent_COMMUNITY_MEMBER_BAN,
|
||||
MemberToAction: pubkey,
|
||||
}
|
||||
}
|
||||
|
||||
func (o *Community) ToUnbanCommunityMemberCommunityEvent(pubkey string) *CommunityEvent {
|
||||
return &CommunityEvent{
|
||||
CommunityEventClock: o.NewCommunityEventClock(),
|
||||
Type: protobuf.CommunityEvent_COMMUNITY_MEMBER_UNBAN,
|
||||
MemberToAction: pubkey,
|
||||
}
|
||||
}
|
||||
|
||||
func (o *Community) ToKickCommunityMemberCommunityEvent(pubkey string) *CommunityEvent {
|
||||
return &CommunityEvent{
|
||||
CommunityEventClock: o.NewCommunityEventClock(),
|
||||
Type: protobuf.CommunityEvent_COMMUNITY_MEMBER_KICK,
|
||||
MemberToAction: pubkey,
|
||||
}
|
||||
}
|
||||
|
||||
func (o *Community) ToCommunityEditCommunityEvent(description *protobuf.CommunityDescription) *CommunityEvent {
|
||||
return &CommunityEvent{
|
||||
CommunityEventClock: o.NewCommunityEventClock(),
|
||||
Type: protobuf.CommunityEvent_COMMUNITY_EDIT,
|
||||
CommunityConfig: &protobuf.CommunityConfig{
|
||||
Identity: description.Identity,
|
||||
Permissions: description.Permissions,
|
||||
AdminSettings: description.AdminSettings,
|
||||
IntroMessage: description.IntroMessage,
|
||||
OutroMessage: description.OutroMessage,
|
||||
Tags: description.Tags,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func (o *Community) ToCommunityTokenPermissionChangeCommunityEvent(permission *protobuf.CommunityTokenPermission) *CommunityEvent {
|
||||
return &CommunityEvent{
|
||||
CommunityEventClock: o.NewCommunityEventClock(),
|
||||
Type: protobuf.CommunityEvent_COMMUNITY_MEMBER_TOKEN_PERMISSION_CHANGE,
|
||||
TokenPermission: permission,
|
||||
}
|
||||
}
|
||||
|
||||
func (o *Community) ToCommunityTokenPermissionDeleteCommunityEvent(permission *protobuf.CommunityTokenPermission) *CommunityEvent {
|
||||
return &CommunityEvent{
|
||||
CommunityEventClock: o.NewCommunityEventClock(),
|
||||
Type: protobuf.CommunityEvent_COMMUNITY_MEMBER_TOKEN_PERMISSION_DELETE,
|
||||
TokenPermission: permission,
|
||||
}
|
||||
}
|
||||
|
||||
func (o *Community) ToCommunityRequestToJoinAcceptCommunityEvent(changes *CommunityEventChanges) *CommunityEvent {
|
||||
return &CommunityEvent{
|
||||
CommunityEventClock: o.NewCommunityEventClock(),
|
||||
Type: protobuf.CommunityEvent_COMMUNITY_REQUEST_TO_JOIN_ACCEPT,
|
||||
AcceptedRequestsToJoin: changes.AcceptedRequestsToJoin,
|
||||
}
|
||||
}
|
||||
|
||||
func (o *Community) ToCommunityRequestToJoinRejectCommunityEvent(changes *CommunityEventChanges) *CommunityEvent {
|
||||
return &CommunityEvent{
|
||||
CommunityEventClock: o.NewCommunityEventClock(),
|
||||
Type: protobuf.CommunityEvent_COMMUNITY_REQUEST_TO_JOIN_REJECT,
|
||||
RejectedRequestsToJoin: changes.RejectedRequestsToJoin,
|
||||
}
|
||||
}
|
||||
|
||||
func (o *Community) ToAddTokenMetadataCommunityEvent(tokenMetadata *protobuf.CommunityTokenMetadata) *CommunityEvent {
|
||||
return &CommunityEvent{
|
||||
CommunityEventClock: o.NewCommunityEventClock(),
|
||||
Type: protobuf.CommunityEvent_COMMUNITY_TOKEN_ADD,
|
||||
TokenMetadata: tokenMetadata,
|
||||
}
|
||||
}
|
||||
|
||||
func (o *Community) UpdateCommunityByEvents(communityEventMessage *CommunityEventsMessage) error {
|
||||
o.mutex.Lock()
|
||||
defer o.mutex.Unlock()
|
||||
|
||||
// Validate that EventsBaseCommunityDescription was signed by the control node
|
||||
description, err := validateAndGetEventsMessageCommunityDescription(communityEventMessage.EventsBaseCommunityDescription, o.ControlNode())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if description.Clock != o.config.CommunityDescription.Clock {
|
||||
return ErrInvalidCommunityEventClock
|
||||
}
|
||||
|
||||
// Merge community events to existing community. Community events must be stored to the db
|
||||
// during saving the community
|
||||
o.mergeCommunityEvents(communityEventMessage)
|
||||
|
||||
if o.encryptor != nil {
|
||||
_, err = decryptDescription(o.ID(), o.encryptor, description, o.config.Logger)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
o.config.CommunityDescription = description
|
||||
o.config.CommunityDescriptionProtocolMessage = communityEventMessage.EventsBaseCommunityDescription
|
||||
|
||||
// Update the copy of the CommunityDescription by community events
|
||||
err = o.updateCommunityDescriptionByEvents()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (o *Community) updateCommunityDescriptionByEvents() error {
|
||||
if o.config.EventsData == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
for _, event := range o.config.EventsData.Events {
|
||||
err := o.updateCommunityDescriptionByCommunityEvent(event)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (o *Community) updateCommunityDescriptionByCommunityEvent(communityEvent CommunityEvent) error {
|
||||
switch communityEvent.Type {
|
||||
case protobuf.CommunityEvent_COMMUNITY_EDIT:
|
||||
o.config.CommunityDescription.Identity = communityEvent.CommunityConfig.Identity
|
||||
o.config.CommunityDescription.Permissions = communityEvent.CommunityConfig.Permissions
|
||||
o.config.CommunityDescription.AdminSettings = communityEvent.CommunityConfig.AdminSettings
|
||||
o.config.CommunityDescription.IntroMessage = communityEvent.CommunityConfig.IntroMessage
|
||||
o.config.CommunityDescription.OutroMessage = communityEvent.CommunityConfig.OutroMessage
|
||||
o.config.CommunityDescription.Tags = communityEvent.CommunityConfig.Tags
|
||||
|
||||
case protobuf.CommunityEvent_COMMUNITY_MEMBER_TOKEN_PERMISSION_CHANGE:
|
||||
if o.IsControlNode() {
|
||||
_, err := o.upsertTokenPermission(communityEvent.TokenPermission)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
case protobuf.CommunityEvent_COMMUNITY_MEMBER_TOKEN_PERMISSION_DELETE:
|
||||
if o.IsControlNode() {
|
||||
_, err := o.deleteTokenPermission(communityEvent.TokenPermission.Id)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
case protobuf.CommunityEvent_COMMUNITY_CATEGORY_CREATE:
|
||||
_, err := o.createCategory(communityEvent.CategoryData.CategoryId, communityEvent.CategoryData.Name, communityEvent.CategoryData.ChannelsIds)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
case protobuf.CommunityEvent_COMMUNITY_CATEGORY_DELETE:
|
||||
_, err := o.deleteCategory(communityEvent.CategoryData.CategoryId)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
case protobuf.CommunityEvent_COMMUNITY_CATEGORY_EDIT:
|
||||
_, err := o.editCategory(communityEvent.CategoryData.CategoryId, communityEvent.CategoryData.Name, communityEvent.CategoryData.ChannelsIds)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
case protobuf.CommunityEvent_COMMUNITY_CHANNEL_CREATE:
|
||||
err := o.createChat(communityEvent.ChannelData.ChannelId, communityEvent.ChannelData.Channel)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
case protobuf.CommunityEvent_COMMUNITY_CHANNEL_DELETE:
|
||||
o.deleteChat(communityEvent.ChannelData.ChannelId)
|
||||
|
||||
case protobuf.CommunityEvent_COMMUNITY_CHANNEL_EDIT:
|
||||
err := o.editChat(communityEvent.ChannelData.ChannelId, communityEvent.ChannelData.Channel)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
case protobuf.CommunityEvent_COMMUNITY_CHANNEL_REORDER:
|
||||
_, err := o.reorderChat(communityEvent.ChannelData.CategoryId, communityEvent.ChannelData.ChannelId, int(communityEvent.ChannelData.Position))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
case protobuf.CommunityEvent_COMMUNITY_CATEGORY_REORDER:
|
||||
_, err := o.reorderCategories(communityEvent.CategoryData.CategoryId, int(communityEvent.CategoryData.Position))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
case protobuf.CommunityEvent_COMMUNITY_MEMBER_KICK:
|
||||
if o.IsControlNode() {
|
||||
pk, err := common.HexToPubkey(communityEvent.MemberToAction)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
o.removeMemberFromOrg(pk)
|
||||
}
|
||||
case protobuf.CommunityEvent_COMMUNITY_MEMBER_BAN:
|
||||
if o.IsControlNode() {
|
||||
pk, err := common.HexToPubkey(communityEvent.MemberToAction)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
o.banUserFromCommunity(pk)
|
||||
}
|
||||
case protobuf.CommunityEvent_COMMUNITY_MEMBER_UNBAN:
|
||||
if o.IsControlNode() {
|
||||
pk, err := common.HexToPubkey(communityEvent.MemberToAction)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
o.unbanUserFromCommunity(pk)
|
||||
}
|
||||
case protobuf.CommunityEvent_COMMUNITY_TOKEN_ADD:
|
||||
o.config.CommunityDescription.CommunityTokensMetadata = append(o.config.CommunityDescription.CommunityTokensMetadata, communityEvent.TokenMetadata)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (o *Community) NewCommunityEventClock() uint64 {
|
||||
return uint64(time.Now().Unix())
|
||||
}
|
||||
|
||||
func (o *Community) addNewCommunityEvent(event *CommunityEvent) error {
|
||||
err := validateCommunityEvent(event)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// All events must be built on top of the control node CommunityDescription
|
||||
// If there were no events before, extract CommunityDescription from CommunityDescriptionProtocolMessage
|
||||
// and check the signature
|
||||
if o.config.EventsData == nil || len(o.config.EventsData.EventsBaseCommunityDescription) == 0 {
|
||||
_, err := validateAndGetEventsMessageCommunityDescription(o.config.CommunityDescriptionProtocolMessage, o.ControlNode())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
o.config.EventsData = &EventsData{
|
||||
EventsBaseCommunityDescription: o.config.CommunityDescriptionProtocolMessage,
|
||||
Events: []CommunityEvent{},
|
||||
}
|
||||
}
|
||||
|
||||
event.Payload, err = proto.Marshal(event.ToProtobuf())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
o.config.EventsData.Events = append(o.config.EventsData.Events, *event)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (o *Community) ToCommunityEventsMessage() *CommunityEventsMessage {
|
||||
return &CommunityEventsMessage{
|
||||
CommunityID: o.ID(),
|
||||
EventsBaseCommunityDescription: o.config.EventsData.EventsBaseCommunityDescription,
|
||||
Events: o.config.EventsData.Events,
|
||||
}
|
||||
}
|
||||
|
||||
func validateAndGetEventsMessageCommunityDescription(signedDescription []byte, signerPubkey *ecdsa.PublicKey) (*protobuf.CommunityDescription, error) {
|
||||
metadata := &protobuf.ApplicationMetadataMessage{}
|
||||
|
||||
err := proto.Unmarshal(signedDescription, metadata)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if metadata.Type != protobuf.ApplicationMetadataMessage_COMMUNITY_DESCRIPTION {
|
||||
return nil, ErrInvalidMessage
|
||||
}
|
||||
|
||||
signer, err := utils.RecoverKey(metadata)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if signer == nil {
|
||||
return nil, errors.New("CommunityDescription does not contain the control node signature")
|
||||
}
|
||||
|
||||
if !signer.Equal(signerPubkey) {
|
||||
return nil, errors.New("CommunityDescription was not signed by an owner")
|
||||
}
|
||||
|
||||
description := &protobuf.CommunityDescription{}
|
||||
|
||||
err = proto.Unmarshal(metadata.Payload, description)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return description, nil
|
||||
}
|
||||
285
vendor/github.com/status-im/status-go/protocol/communities/community_event_message.go
generated
vendored
Normal file
285
vendor/github.com/status-im/status-go/protocol/communities/community_event_message.go
generated
vendored
Normal file
@@ -0,0 +1,285 @@
|
||||
package communities
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"crypto/ecdsa"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"sort"
|
||||
|
||||
"github.com/golang/protobuf/proto"
|
||||
|
||||
"github.com/status-im/status-go/eth-node/crypto"
|
||||
"github.com/status-im/status-go/protocol/protobuf"
|
||||
)
|
||||
|
||||
type CommunityEvent struct {
|
||||
CommunityEventClock uint64 `json:"communityEventClock"`
|
||||
Type protobuf.CommunityEvent_EventType `json:"type"`
|
||||
CommunityConfig *protobuf.CommunityConfig `json:"communityConfig,omitempty"`
|
||||
TokenPermission *protobuf.CommunityTokenPermission `json:"tokenPermissions,omitempty"`
|
||||
CategoryData *protobuf.CategoryData `json:"categoryData,omitempty"`
|
||||
ChannelData *protobuf.ChannelData `json:"channelData,omitempty"`
|
||||
MemberToAction string `json:"memberToAction,omitempty"`
|
||||
MembersAdded map[string]*protobuf.CommunityMember `json:"membersAdded,omitempty"`
|
||||
RejectedRequestsToJoin map[string]*protobuf.CommunityRequestToJoin `json:"rejectedRequestsToJoin,omitempty"`
|
||||
AcceptedRequestsToJoin map[string]*protobuf.CommunityRequestToJoin `json:"acceptedRequestsToJoin,omitempty"`
|
||||
TokenMetadata *protobuf.CommunityTokenMetadata `json:"tokenMetadata,omitempty"`
|
||||
Payload []byte `json:"payload"`
|
||||
Signature []byte `json:"signature"`
|
||||
}
|
||||
|
||||
func (e *CommunityEvent) ToProtobuf() *protobuf.CommunityEvent {
|
||||
return &protobuf.CommunityEvent{
|
||||
CommunityEventClock: e.CommunityEventClock,
|
||||
Type: e.Type,
|
||||
CommunityConfig: e.CommunityConfig,
|
||||
TokenPermission: e.TokenPermission,
|
||||
CategoryData: e.CategoryData,
|
||||
ChannelData: e.ChannelData,
|
||||
MemberToAction: e.MemberToAction,
|
||||
MembersAdded: e.MembersAdded,
|
||||
RejectedRequestsToJoin: e.RejectedRequestsToJoin,
|
||||
AcceptedRequestsToJoin: e.AcceptedRequestsToJoin,
|
||||
TokenMetadata: e.TokenMetadata,
|
||||
}
|
||||
}
|
||||
|
||||
func communityEventFromProtobuf(msg *protobuf.SignedCommunityEvent) (*CommunityEvent, error) {
|
||||
decodedEvent := protobuf.CommunityEvent{}
|
||||
err := proto.Unmarshal(msg.Payload, &decodedEvent)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &CommunityEvent{
|
||||
CommunityEventClock: decodedEvent.CommunityEventClock,
|
||||
Type: decodedEvent.Type,
|
||||
CommunityConfig: decodedEvent.CommunityConfig,
|
||||
TokenPermission: decodedEvent.TokenPermission,
|
||||
CategoryData: decodedEvent.CategoryData,
|
||||
ChannelData: decodedEvent.ChannelData,
|
||||
MemberToAction: decodedEvent.MemberToAction,
|
||||
MembersAdded: decodedEvent.MembersAdded,
|
||||
RejectedRequestsToJoin: decodedEvent.RejectedRequestsToJoin,
|
||||
AcceptedRequestsToJoin: decodedEvent.AcceptedRequestsToJoin,
|
||||
TokenMetadata: decodedEvent.TokenMetadata,
|
||||
Payload: msg.Payload,
|
||||
Signature: msg.Signature,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (e *CommunityEvent) RecoverSigner() (*ecdsa.PublicKey, error) {
|
||||
if e.Signature == nil || len(e.Signature) == 0 {
|
||||
return nil, errors.New("missing signature")
|
||||
}
|
||||
|
||||
signer, err := crypto.SigToPub(
|
||||
crypto.Keccak256(e.Payload),
|
||||
e.Signature,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, errors.New("failed to recover signer")
|
||||
}
|
||||
|
||||
return signer, nil
|
||||
}
|
||||
|
||||
func (e *CommunityEvent) Sign(pk *ecdsa.PrivateKey) error {
|
||||
sig, err := crypto.Sign(crypto.Keccak256(e.Payload), pk)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
e.Signature = sig
|
||||
return nil
|
||||
}
|
||||
|
||||
type CommunityEventsMessage struct {
|
||||
CommunityID []byte `json:"communityId"`
|
||||
EventsBaseCommunityDescription []byte `json:"eventsBaseCommunityDescription"`
|
||||
Events []CommunityEvent `json:"events,omitempty"`
|
||||
}
|
||||
|
||||
func (m *CommunityEventsMessage) ToProtobuf() *protobuf.CommunityEventsMessage {
|
||||
result := protobuf.CommunityEventsMessage{
|
||||
CommunityId: m.CommunityID,
|
||||
EventsBaseCommunityDescription: m.EventsBaseCommunityDescription,
|
||||
SignedEvents: []*protobuf.SignedCommunityEvent{},
|
||||
}
|
||||
|
||||
for _, event := range m.Events {
|
||||
signedEvent := &protobuf.SignedCommunityEvent{
|
||||
Signature: event.Signature,
|
||||
Payload: event.Payload,
|
||||
}
|
||||
result.SignedEvents = append(result.SignedEvents, signedEvent)
|
||||
}
|
||||
|
||||
return &result
|
||||
}
|
||||
|
||||
func CommunityEventsMessageFromProtobuf(msg *protobuf.CommunityEventsMessage) (*CommunityEventsMessage, error) {
|
||||
result := &CommunityEventsMessage{
|
||||
CommunityID: msg.CommunityId,
|
||||
EventsBaseCommunityDescription: msg.EventsBaseCommunityDescription,
|
||||
Events: []CommunityEvent{},
|
||||
}
|
||||
|
||||
for _, signedEvent := range msg.SignedEvents {
|
||||
event, err := communityEventFromProtobuf(signedEvent)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
result.Events = append(result.Events, *event)
|
||||
}
|
||||
|
||||
return result, nil
|
||||
}
|
||||
|
||||
func (m *CommunityEventsMessage) Marshal() ([]byte, error) {
|
||||
pb := m.ToProtobuf()
|
||||
return proto.Marshal(pb)
|
||||
}
|
||||
|
||||
func (c *Community) mergeCommunityEvents(communityEventMessage *CommunityEventsMessage) {
|
||||
if c.config.EventsData == nil {
|
||||
c.config.EventsData = &EventsData{
|
||||
EventsBaseCommunityDescription: communityEventMessage.EventsBaseCommunityDescription,
|
||||
Events: communityEventMessage.Events,
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
for _, update := range communityEventMessage.Events {
|
||||
var exists bool
|
||||
for _, existing := range c.config.EventsData.Events {
|
||||
if isCommunityEventsEqual(update, existing) {
|
||||
exists = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !exists {
|
||||
c.config.EventsData.Events = append(c.config.EventsData.Events, update)
|
||||
}
|
||||
}
|
||||
|
||||
c.sortCommunityEvents()
|
||||
}
|
||||
|
||||
func (c *Community) sortCommunityEvents() {
|
||||
sort.Slice(c.config.EventsData.Events, func(i, j int) bool {
|
||||
return c.config.EventsData.Events[i].CommunityEventClock < c.config.EventsData.Events[j].CommunityEventClock
|
||||
})
|
||||
}
|
||||
|
||||
func validateCommunityEvent(communityEvent *CommunityEvent) error {
|
||||
switch communityEvent.Type {
|
||||
case protobuf.CommunityEvent_COMMUNITY_EDIT:
|
||||
if communityEvent.CommunityConfig == nil || communityEvent.CommunityConfig.Identity == nil ||
|
||||
communityEvent.CommunityConfig.Permissions == nil || communityEvent.CommunityConfig.AdminSettings == nil {
|
||||
return errors.New("invalid config change admin event")
|
||||
}
|
||||
|
||||
case protobuf.CommunityEvent_COMMUNITY_MEMBER_TOKEN_PERMISSION_CHANGE:
|
||||
if communityEvent.TokenPermission == nil || len(communityEvent.TokenPermission.Id) == 0 {
|
||||
return errors.New("invalid token permission change event")
|
||||
}
|
||||
|
||||
case protobuf.CommunityEvent_COMMUNITY_MEMBER_TOKEN_PERMISSION_DELETE:
|
||||
if communityEvent.TokenPermission == nil || len(communityEvent.TokenPermission.Id) == 0 {
|
||||
return errors.New("invalid token permission delete event")
|
||||
}
|
||||
|
||||
case protobuf.CommunityEvent_COMMUNITY_CATEGORY_CREATE:
|
||||
if communityEvent.CategoryData == nil || len(communityEvent.CategoryData.CategoryId) == 0 {
|
||||
return errors.New("invalid community category create event")
|
||||
}
|
||||
|
||||
case protobuf.CommunityEvent_COMMUNITY_CATEGORY_DELETE:
|
||||
if communityEvent.CategoryData == nil || len(communityEvent.CategoryData.CategoryId) == 0 {
|
||||
return errors.New("invalid community category delete event")
|
||||
}
|
||||
|
||||
case protobuf.CommunityEvent_COMMUNITY_CATEGORY_EDIT:
|
||||
if communityEvent.CategoryData == nil || len(communityEvent.CategoryData.CategoryId) == 0 {
|
||||
return errors.New("invalid community category edit event")
|
||||
}
|
||||
|
||||
case protobuf.CommunityEvent_COMMUNITY_CHANNEL_CREATE:
|
||||
if communityEvent.ChannelData == nil || len(communityEvent.ChannelData.ChannelId) == 0 ||
|
||||
communityEvent.ChannelData.Channel == nil {
|
||||
return errors.New("invalid community channel create event")
|
||||
}
|
||||
|
||||
case protobuf.CommunityEvent_COMMUNITY_CHANNEL_DELETE:
|
||||
if communityEvent.ChannelData == nil || len(communityEvent.ChannelData.ChannelId) == 0 {
|
||||
return errors.New("invalid community channel delete event")
|
||||
}
|
||||
|
||||
case protobuf.CommunityEvent_COMMUNITY_CHANNEL_EDIT:
|
||||
if communityEvent.ChannelData == nil || len(communityEvent.ChannelData.ChannelId) == 0 ||
|
||||
communityEvent.ChannelData.Channel == nil {
|
||||
return errors.New("invalid community channel edit event")
|
||||
}
|
||||
|
||||
case protobuf.CommunityEvent_COMMUNITY_CHANNEL_REORDER:
|
||||
if communityEvent.ChannelData == nil || len(communityEvent.ChannelData.ChannelId) == 0 {
|
||||
return errors.New("invalid community channel reorder event")
|
||||
}
|
||||
|
||||
case protobuf.CommunityEvent_COMMUNITY_CATEGORY_REORDER:
|
||||
if communityEvent.CategoryData == nil || len(communityEvent.CategoryData.CategoryId) == 0 {
|
||||
return errors.New("invalid community category reorder event")
|
||||
}
|
||||
|
||||
case protobuf.CommunityEvent_COMMUNITY_REQUEST_TO_JOIN_ACCEPT:
|
||||
if communityEvent.AcceptedRequestsToJoin == nil {
|
||||
return errors.New("invalid community request to join accepted event")
|
||||
}
|
||||
|
||||
case protobuf.CommunityEvent_COMMUNITY_REQUEST_TO_JOIN_REJECT:
|
||||
if communityEvent.RejectedRequestsToJoin == nil {
|
||||
return errors.New("invalid community request to join reject event")
|
||||
}
|
||||
|
||||
case protobuf.CommunityEvent_COMMUNITY_MEMBER_KICK:
|
||||
if len(communityEvent.MemberToAction) == 0 {
|
||||
return errors.New("invalid community member kick event")
|
||||
}
|
||||
|
||||
case protobuf.CommunityEvent_COMMUNITY_MEMBER_BAN:
|
||||
if len(communityEvent.MemberToAction) == 0 {
|
||||
return errors.New("invalid community member ban event")
|
||||
}
|
||||
|
||||
case protobuf.CommunityEvent_COMMUNITY_MEMBER_UNBAN:
|
||||
if len(communityEvent.MemberToAction) == 0 {
|
||||
return errors.New("invalid community member unban event")
|
||||
}
|
||||
|
||||
case protobuf.CommunityEvent_COMMUNITY_TOKEN_ADD:
|
||||
if communityEvent.TokenMetadata == nil || len(communityEvent.TokenMetadata.ContractAddresses) == 0 {
|
||||
return errors.New("invalid add community token event")
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func isCommunityEventsEqual(left CommunityEvent, right CommunityEvent) bool {
|
||||
return bytes.Equal(left.Payload, right.Payload)
|
||||
}
|
||||
|
||||
func communityEventsToJSONEncodedBytes(communityEvents []CommunityEvent) ([]byte, error) {
|
||||
return json.Marshal(communityEvents)
|
||||
}
|
||||
|
||||
func communityEventsFromJSONEncodedBytes(jsonEncodedRawEvents []byte) ([]CommunityEvent, error) {
|
||||
var events []CommunityEvent
|
||||
err := json.Unmarshal(jsonEncodedRawEvents, &events)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return events, nil
|
||||
}
|
||||
65
vendor/github.com/status-im/status-go/protocol/communities/community_token_permission.go
generated
vendored
Normal file
65
vendor/github.com/status-im/status-go/protocol/communities/community_token_permission.go
generated
vendored
Normal file
@@ -0,0 +1,65 @@
|
||||
package communities
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
|
||||
"github.com/status-im/status-go/protocol/protobuf"
|
||||
)
|
||||
|
||||
// TokenPermissionState describes the local state of a CommunityTokenPermission:
// approved (active), or with a pending addition, update, or removal.
type TokenPermissionState uint8

const (
	// TokenPermissionApproved is the zero value: the permission is active.
	TokenPermissionApproved TokenPermissionState = iota
	// TokenPermissionAdditionPending marks a newly added permission awaiting approval.
	TokenPermissionAdditionPending
	// TokenPermissionUpdatePending marks a permission whose update awaits approval.
	TokenPermissionUpdatePending
	// TokenPermissionRemovalPending marks a permission whose removal awaits approval.
	TokenPermissionRemovalPending
)
|
||||
|
||||
// CommunityTokenPermission wraps the protobuf token permission with an extra
// State field. State is omitted from JSON when it is the zero value
// (TokenPermissionApproved).
type CommunityTokenPermission struct {
	*protobuf.CommunityTokenPermission
	State TokenPermissionState `json:"state,omitempty"`
}
|
||||
|
||||
func NewCommunityTokenPermission(base *protobuf.CommunityTokenPermission) *CommunityTokenPermission {
|
||||
return &CommunityTokenPermission{
|
||||
CommunityTokenPermission: base,
|
||||
State: TokenPermissionApproved,
|
||||
}
|
||||
}
|
||||
|
||||
func (p *CommunityTokenPermission) Equals(other *CommunityTokenPermission) bool {
|
||||
if p.Id != other.Id ||
|
||||
p.Type != other.Type ||
|
||||
len(p.TokenCriteria) != len(other.TokenCriteria) ||
|
||||
len(p.ChatIds) != len(other.ChatIds) ||
|
||||
p.IsPrivate != other.IsPrivate ||
|
||||
p.State != other.State {
|
||||
return false
|
||||
}
|
||||
|
||||
for i := range p.TokenCriteria {
|
||||
if !compareTokenCriteria(p.TokenCriteria[i], other.TokenCriteria[i]) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
return reflect.DeepEqual(p.ChatIds, other.ChatIds)
|
||||
}
|
||||
|
||||
func compareTokenCriteria(a, b *protobuf.TokenCriteria) bool {
|
||||
if a == nil && b == nil {
|
||||
return true
|
||||
}
|
||||
if a == nil || b == nil {
|
||||
return false
|
||||
}
|
||||
|
||||
return a.Type == b.Type &&
|
||||
a.Symbol == b.Symbol &&
|
||||
a.Name == b.Name &&
|
||||
a.Amount == b.Amount &&
|
||||
a.EnsPattern == b.EnsPattern &&
|
||||
a.Decimals == b.Decimals &&
|
||||
reflect.DeepEqual(a.ContractAddresses, b.ContractAddresses) &&
|
||||
reflect.DeepEqual(a.TokenIds, b.TokenIds)
|
||||
}
|
||||
87
vendor/github.com/status-im/status-go/protocol/communities/communnity_privileged_member_sync_msg.go
generated
vendored
Normal file
87
vendor/github.com/status-im/status-go/protocol/communities/communnity_privileged_member_sync_msg.go
generated
vendored
Normal file
@@ -0,0 +1,87 @@
|
||||
package communities
|
||||
|
||||
import (
|
||||
"crypto/ecdsa"
|
||||
|
||||
"github.com/status-im/status-go/eth-node/types"
|
||||
"github.com/status-im/status-go/protocol/common"
|
||||
"github.com/status-im/status-go/protocol/protobuf"
|
||||
)
|
||||
|
||||
// CommunityPrivilegedMemberSyncMessage bundles a privileged-user sync payload
// together with the community's private key and the public keys of the
// members the message should be delivered to.
type CommunityPrivilegedMemberSyncMessage struct {
	CommunityPrivateKey                *ecdsa.PrivateKey
	Receivers                          []*ecdsa.PublicKey
	CommunityPrivilegedUserSyncMessage *protobuf.CommunityPrivilegedUserSyncMessage
}
|
||||
|
||||
func (m *Manager) HandleRequestToJoinPrivilegedUserSyncMessage(message *protobuf.CommunityPrivilegedUserSyncMessage, communityID types.HexBytes) ([]*RequestToJoin, error) {
|
||||
var state RequestToJoinState
|
||||
if message.Type == protobuf.CommunityPrivilegedUserSyncMessage_CONTROL_NODE_ACCEPT_REQUEST_TO_JOIN {
|
||||
state = RequestToJoinStateAccepted
|
||||
} else {
|
||||
state = RequestToJoinStateDeclined
|
||||
}
|
||||
|
||||
requestsToJoin := make([]*RequestToJoin, 0)
|
||||
for signer, requestToJoinProto := range message.RequestToJoin {
|
||||
requestToJoin := &RequestToJoin{
|
||||
PublicKey: signer,
|
||||
Clock: requestToJoinProto.Clock,
|
||||
ENSName: requestToJoinProto.EnsName,
|
||||
CommunityID: requestToJoinProto.CommunityId,
|
||||
State: state,
|
||||
RevealedAccounts: requestToJoinProto.RevealedAccounts,
|
||||
}
|
||||
requestToJoin.CalculateID()
|
||||
|
||||
if _, err := m.saveOrUpdateRequestToJoin(communityID, requestToJoin); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if err := m.persistence.RemoveRequestToJoinRevealedAddresses(requestToJoin.ID); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if requestToJoin.RevealedAccounts != nil && len(requestToJoin.RevealedAccounts) > 0 {
|
||||
if err := m.persistence.SaveRequestToJoinRevealedAddresses(requestToJoin.ID, requestToJoin.RevealedAccounts); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
requestsToJoin = append(requestsToJoin, requestToJoin)
|
||||
}
|
||||
|
||||
return requestsToJoin, nil
|
||||
}
|
||||
|
||||
func (m *Manager) HandleSyncAllRequestToJoinForNewPrivilegedMember(message *protobuf.CommunityPrivilegedUserSyncMessage, communityID types.HexBytes) ([]*RequestToJoin, error) {
|
||||
nonAcceptedRequestsToJoin := []*RequestToJoin{}
|
||||
|
||||
myPk := common.PubkeyToHex(&m.identity.PublicKey)
|
||||
|
||||
// We received all requests to join from the control node. Remove all requests to join except our own
|
||||
err := m.persistence.RemoveAllCommunityRequestsToJoinWithRevealedAddressesExceptPublicKey(myPk, communityID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
for _, syncRequestToJoin := range message.SyncRequestsToJoin {
|
||||
requestToJoin := new(RequestToJoin)
|
||||
requestToJoin.InitFromSyncProtobuf(syncRequestToJoin)
|
||||
|
||||
if _, err := m.saveOrUpdateRequestToJoin(communityID, requestToJoin); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if requestToJoin.RevealedAccounts != nil && len(requestToJoin.RevealedAccounts) > 0 {
|
||||
if err := m.persistence.SaveRequestToJoinRevealedAddresses(requestToJoin.ID, requestToJoin.RevealedAccounts); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
if requestToJoin.State != RequestToJoinStateAccepted {
|
||||
nonAcceptedRequestsToJoin = append(nonAcceptedRequestsToJoin, requestToJoin)
|
||||
}
|
||||
}
|
||||
return nonAcceptedRequestsToJoin, nil
|
||||
}
|
||||
47
vendor/github.com/status-im/status-go/protocol/communities/errors.go
generated
vendored
Normal file
47
vendor/github.com/status-im/status-go/protocol/communities/errors.go
generated
vendored
Normal file
@@ -0,0 +1,47 @@
|
||||
package communities
|
||||
|
||||
import "errors"
|
||||
|
||||
// Sentinel errors returned by the communities package; compare with
// errors.Is. Grouped by concern: lookup failures, community description
// validation, privilege/authorization checks, token permissions, wallets,
// and revealed-accounts / history-archive handling.
var (
	ErrChatNotFound          = errors.New("chat not found")
	ErrCategoryNotFound      = errors.New("category not found")
	ErrNoChangeInPosition    = errors.New("no change in category position")
	ErrChatAlreadyAssigned   = errors.New("chat already assigned to a category")
	ErrOrgNotFound           = errors.New("community not found")
	ErrOrgAlreadyJoined      = errors.New("community already joined")
	ErrChatAlreadyExists     = errors.New("chat already exists")
	ErrCategoryAlreadyExists = errors.New("category already exists")
	ErrCantRequestAccess     = errors.New("can't request access")

	ErrInvalidCommunityDescription                        = errors.New("invalid community description")
	ErrInvalidCommunityDescriptionNoOrgPermissions        = errors.New("invalid community description no org permissions")
	ErrInvalidCommunityDescriptionNoChatPermissions       = errors.New("invalid community description no chat permissions")
	ErrInvalidCommunityDescriptionUnknownChatAccess       = errors.New("invalid community description unknown chat access")
	ErrInvalidCommunityDescriptionUnknownOrgAccess        = errors.New("invalid community description unknown org access")
	ErrInvalidCommunityDescriptionMemberInChatButNotInOrg = errors.New("invalid community description member in chat but not in org")
	ErrInvalidCommunityDescriptionCategoryNoID            = errors.New("invalid community category id")
	ErrInvalidCommunityDescriptionCategoryNoName          = errors.New("invalid community category name")
	ErrInvalidCommunityDescriptionChatIdentity            = errors.New("invalid community chat name, missing")
	ErrInvalidCommunityDescriptionDuplicatedName          = errors.New("invalid community chat name, duplicated")
	ErrInvalidCommunityDescriptionUnknownChatCategory     = errors.New("invalid community category in chat")
	ErrInvalidCommunityTags                               = errors.New("invalid community tags")
	ErrInvalidMessage                                     = errors.New("invalid community description message")

	ErrNotAdmin             = errors.New("no admin privileges for this community")
	ErrNotOwner             = errors.New("no owner privileges for this community")
	ErrNotControlNode       = errors.New("not a control node")
	ErrInvalidGrant         = errors.New("invalid grant")
	ErrNotAuthorized        = errors.New("not authorized")
	ErrAlreadyMember        = errors.New("already a member")
	ErrAlreadyJoined        = errors.New("already joined")
	ErrMemberNotFound       = errors.New("member not found")
	ErrNotEnoughPermissions = errors.New("not enough permissions for this community")

	ErrTokenPermissionAlreadyExists  = errors.New("token permission already exists")
	ErrTokenPermissionNotFound       = errors.New("token permission not found")
	ErrNoPermissionToJoin            = errors.New("member has no permission to join")
	ErrInvalidManageTokensPermission = errors.New("no privileges to manage tokens")
	ErrPermissionToJoinNotSatisfied  = errors.New("permission to join not satisfied")

	ErrMemberWalletAlreadyExists = errors.New("member wallet already exists")
	ErrMemberWalletNotFound      = errors.New("member wallet not found")
	ErrCannotRemoveOwnerOrAdmin  = errors.New("not allowed to remove admin or owner")
	ErrCannotBanOwnerOrAdmin     = errors.New("not allowed to ban admin or owner")

	ErrRevealedAccountsAbsent        = errors.New("revealed accounts is absent")
	ErrNoRevealedAccountsSignature   = errors.New("revealed accounts without the signature")
	ErrNoFreeSpaceForHistoryArchives = errors.New("history archive: No free space for downloading history archives")
)
|
||||
5216
vendor/github.com/status-im/status-go/protocol/communities/manager.go
generated
vendored
Normal file
5216
vendor/github.com/status-im/status-go/protocol/communities/manager.go
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
441
vendor/github.com/status-im/status-go/protocol/communities/permission_checker.go
generated
vendored
Normal file
441
vendor/github.com/status-im/status-go/protocol/communities/permission_checker.go
generated
vendored
Normal file
@@ -0,0 +1,441 @@
|
||||
package communities
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"math"
|
||||
"math/big"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"go.uber.org/zap"
|
||||
|
||||
maps "golang.org/x/exp/maps"
|
||||
slices "golang.org/x/exp/slices"
|
||||
|
||||
gethcommon "github.com/ethereum/go-ethereum/common"
|
||||
"github.com/ethereum/go-ethereum/common/hexutil"
|
||||
"github.com/status-im/status-go/protocol/ens"
|
||||
"github.com/status-im/status-go/protocol/protobuf"
|
||||
walletcommon "github.com/status-im/status-go/services/wallet/common"
|
||||
"github.com/status-im/status-go/services/wallet/thirdparty"
|
||||
)
|
||||
|
||||
// PermissionChecker verifies whether a set of wallet addresses satisfies a
// community's token-permission requirements.
type PermissionChecker interface {
	// CheckPermissionToJoin evaluates the community's join permissions
	// against the given addresses.
	CheckPermissionToJoin(*Community, []gethcommon.Address) (*CheckPermissionToJoinResponse, error)
	// CheckPermissions evaluates the given permissions against the
	// account/chain-ID combinations; with shortcircuit true it may stop as
	// soon as the outcome is known.
	CheckPermissions(permissions []*CommunityTokenPermission, accountsAndChainIDs []*AccountChainIDsCombination, shortcircuit bool) (*CheckPermissionsResponse, error)
}
|
||||
|
||||
// DefaultPermissionChecker is the default PermissionChecker implementation:
// it resolves ERC-20 balances via tokenManager, ERC-721 holdings via
// collectiblesManager, and ENS ownership via ensVerifier.
type DefaultPermissionChecker struct {
	tokenManager        TokenManager
	collectiblesManager CollectiblesManager
	// ensVerifier may be nil; ENS lookups then yield no names (a warning is logged).
	ensVerifier *ens.Verifier

	logger *zap.Logger
}
|
||||
|
||||
func (p *DefaultPermissionChecker) getOwnedENS(addresses []gethcommon.Address) ([]string, error) {
|
||||
ownedENS := make([]string, 0)
|
||||
if p.ensVerifier == nil {
|
||||
p.logger.Warn("no ensVerifier configured for communities manager")
|
||||
return ownedENS, nil
|
||||
}
|
||||
for _, address := range addresses {
|
||||
name, err := p.ensVerifier.ReverseResolve(address)
|
||||
if err != nil && err.Error() != "not a resolver" {
|
||||
return ownedENS, err
|
||||
}
|
||||
if name != "" {
|
||||
ownedENS = append(ownedENS, name)
|
||||
}
|
||||
}
|
||||
return ownedENS, nil
|
||||
}
|
||||
func (p *DefaultPermissionChecker) GetOwnedERC721Tokens(walletAddresses []gethcommon.Address, tokenRequirements map[uint64]map[string]*protobuf.TokenCriteria, chainIDs []uint64) (CollectiblesByChain, error) {
|
||||
if p.collectiblesManager == nil {
|
||||
return nil, errors.New("no collectibles manager")
|
||||
}
|
||||
|
||||
ctx := context.Background()
|
||||
|
||||
ownedERC721Tokens := make(CollectiblesByChain)
|
||||
|
||||
for chainID, erc721Tokens := range tokenRequirements {
|
||||
|
||||
skipChain := true
|
||||
for _, cID := range chainIDs {
|
||||
if chainID == cID {
|
||||
skipChain = false
|
||||
}
|
||||
}
|
||||
|
||||
if skipChain {
|
||||
continue
|
||||
}
|
||||
|
||||
contractAddresses := make([]gethcommon.Address, 0)
|
||||
for contractAddress := range erc721Tokens {
|
||||
contractAddresses = append(contractAddresses, gethcommon.HexToAddress(contractAddress))
|
||||
}
|
||||
|
||||
if _, exists := ownedERC721Tokens[chainID]; !exists {
|
||||
ownedERC721Tokens[chainID] = make(map[gethcommon.Address]thirdparty.TokenBalancesPerContractAddress)
|
||||
}
|
||||
|
||||
for _, owner := range walletAddresses {
|
||||
balances, err := p.collectiblesManager.FetchBalancesByOwnerAndContractAddress(ctx, walletcommon.ChainID(chainID), owner, contractAddresses)
|
||||
if err != nil {
|
||||
p.logger.Info("couldn't fetch owner assets", zap.Error(err))
|
||||
return nil, err
|
||||
}
|
||||
ownedERC721Tokens[chainID][owner] = balances
|
||||
}
|
||||
}
|
||||
return ownedERC721Tokens, nil
|
||||
}
|
||||
|
||||
func (p *DefaultPermissionChecker) accountChainsCombinationToMap(combinations []*AccountChainIDsCombination) map[gethcommon.Address][]uint64 {
|
||||
result := make(map[gethcommon.Address][]uint64)
|
||||
for _, combination := range combinations {
|
||||
result[combination.Address] = combination.ChainIDs
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
// merge valid combinations w/o duplicates
|
||||
func (p *DefaultPermissionChecker) MergeValidCombinations(left, right []*AccountChainIDsCombination) []*AccountChainIDsCombination {
|
||||
|
||||
leftMap := p.accountChainsCombinationToMap(left)
|
||||
rightMap := p.accountChainsCombinationToMap(right)
|
||||
|
||||
// merge maps, result in left map
|
||||
for k, v := range rightMap {
|
||||
if _, exists := leftMap[k]; !exists {
|
||||
leftMap[k] = v
|
||||
continue
|
||||
} else {
|
||||
// append chains which are new
|
||||
chains := leftMap[k]
|
||||
for _, chainID := range v {
|
||||
if !slices.Contains(chains, chainID) {
|
||||
chains = append(chains, chainID)
|
||||
}
|
||||
}
|
||||
leftMap[k] = chains
|
||||
}
|
||||
}
|
||||
|
||||
result := []*AccountChainIDsCombination{}
|
||||
for k, v := range leftMap {
|
||||
result = append(result, &AccountChainIDsCombination{
|
||||
Address: k,
|
||||
ChainIDs: v,
|
||||
})
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
func (p *DefaultPermissionChecker) CheckPermissionToJoin(community *Community, addresses []gethcommon.Address) (*CheckPermissionToJoinResponse, error) {
|
||||
becomeAdminPermissions := community.TokenPermissionsByType(protobuf.CommunityTokenPermission_BECOME_ADMIN)
|
||||
becomeMemberPermissions := community.TokenPermissionsByType(protobuf.CommunityTokenPermission_BECOME_MEMBER)
|
||||
becomeTokenMasterPermissions := community.TokenPermissionsByType(protobuf.CommunityTokenPermission_BECOME_TOKEN_MASTER)
|
||||
|
||||
adminOrTokenMasterPermissionsToJoin := append(becomeAdminPermissions, becomeTokenMasterPermissions...)
|
||||
|
||||
allChainIDs, err := p.tokenManager.GetAllChainIDs()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
accountsAndChainIDs := combineAddressesAndChainIDs(addresses, allChainIDs)
|
||||
|
||||
// Check becomeMember and (admin & token master) permissions separately.
|
||||
becomeMemberPermissionsResponse, err := p.checkPermissionsOrDefault(becomeMemberPermissions, accountsAndChainIDs)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if len(adminOrTokenMasterPermissionsToJoin) <= 0 {
|
||||
return becomeMemberPermissionsResponse, nil
|
||||
}
|
||||
// If there are any admin or token master permissions, combine result.
|
||||
|
||||
adminOrTokenPermissionsResponse, err := p.CheckPermissions(adminOrTokenMasterPermissionsToJoin, accountsAndChainIDs, false)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
mergedPermissions := make(map[string]*PermissionTokenCriteriaResult)
|
||||
maps.Copy(mergedPermissions, becomeMemberPermissionsResponse.Permissions)
|
||||
maps.Copy(mergedPermissions, adminOrTokenPermissionsResponse.Permissions)
|
||||
|
||||
mergedCombinations := p.MergeValidCombinations(becomeMemberPermissionsResponse.ValidCombinations, adminOrTokenPermissionsResponse.ValidCombinations)
|
||||
|
||||
combinedResponse := &CheckPermissionsResponse{
|
||||
Satisfied: becomeMemberPermissionsResponse.Satisfied || adminOrTokenPermissionsResponse.Satisfied,
|
||||
Permissions: mergedPermissions,
|
||||
ValidCombinations: mergedCombinations,
|
||||
}
|
||||
|
||||
return combinedResponse, nil
|
||||
}
|
||||
|
||||
func (p *DefaultPermissionChecker) checkPermissionsOrDefault(permissions []*CommunityTokenPermission, accountsAndChainIDs []*AccountChainIDsCombination) (*CheckPermissionsResponse, error) {
|
||||
if len(permissions) == 0 {
|
||||
// There are no permissions to join on this community at the moment,
|
||||
// so we reveal all accounts + all chain IDs
|
||||
response := &CheckPermissionsResponse{
|
||||
Satisfied: true,
|
||||
Permissions: make(map[string]*PermissionTokenCriteriaResult),
|
||||
ValidCombinations: accountsAndChainIDs,
|
||||
}
|
||||
return response, nil
|
||||
}
|
||||
return p.CheckPermissions(permissions, accountsAndChainIDs, false)
|
||||
}
|
||||
|
||||
// CheckPermissions will retrieve balances and check whether the user has
// permission to join the community, if shortcircuit is true, it will stop as soon
// as we know the answer.
//
// Permissions are combined as a logical OR (any one satisfied permission is
// enough), while the token criteria inside a single permission are combined
// as a logical AND. As a side effect the response collects, per account, the
// chain IDs on which that account contributed to satisfying a criterion
// (ValidCombinations).
func (p *DefaultPermissionChecker) CheckPermissions(permissions []*CommunityTokenPermission, accountsAndChainIDs []*AccountChainIDsCombination, shortcircuit bool) (*CheckPermissionsResponse, error) {

	response := &CheckPermissionsResponse{
		Satisfied:         false,
		Permissions:       make(map[string]*PermissionTokenCriteriaResult),
		ValidCombinations: make([]*AccountChainIDsCombination, 0),
	}

	// Split the criteria by token kind; ENS criteria (third result) are
	// re-read from each permission below, so the third value is unused here.
	erc20TokenRequirements, erc721TokenRequirements, _ := ExtractTokenCriteria(permissions)

	erc20ChainIDsMap := make(map[uint64]bool)
	erc721ChainIDsMap := make(map[uint64]bool)

	erc20TokenAddresses := make([]gethcommon.Address, 0)
	accounts := make([]gethcommon.Address, 0)

	for _, accountAndChainIDs := range accountsAndChainIDs {
		accounts = append(accounts, accountAndChainIDs.Address)
	}

	// figure out chain IDs we're interested in
	for chainID, tokens := range erc20TokenRequirements {
		erc20ChainIDsMap[chainID] = true
		for contractAddress := range tokens {
			erc20TokenAddresses = append(erc20TokenAddresses, gethcommon.HexToAddress(contractAddress))
		}
	}

	for chainID := range erc721TokenRequirements {
		erc721ChainIDsMap[chainID] = true
	}

	chainIDsForERC20 := calculateChainIDsSet(accountsAndChainIDs, erc20ChainIDsMap)
	chainIDsForERC721 := calculateChainIDsSet(accountsAndChainIDs, erc721ChainIDsMap)

	// if there are no chain IDs that match token criteria chain IDs
	// we aren't able to check balances on selected networks
	if len(erc20ChainIDsMap) > 0 && len(chainIDsForERC20) == 0 {
		response.NetworksNotSupported = true
		return response, nil
	}

	// chainID -> account -> contract -> balance
	ownedERC20TokenBalances := make(map[uint64]map[gethcommon.Address]map[gethcommon.Address]*hexutil.Big, 0)
	if len(chainIDsForERC20) > 0 {
		// this only returns balances for the networks we're actually interested in
		balances, err := p.tokenManager.GetBalancesByChain(context.Background(), accounts, erc20TokenAddresses, chainIDsForERC20)
		if err != nil {
			return nil, err
		}
		ownedERC20TokenBalances = balances
	}

	ownedERC721Tokens := make(CollectiblesByChain)
	if len(chainIDsForERC721) > 0 {
		collectibles, err := p.GetOwnedERC721Tokens(accounts, erc721TokenRequirements, chainIDsForERC721)
		if err != nil {
			return nil, err
		}
		ownedERC721Tokens = collectibles
	}

	// account -> set of chain IDs on which the account helped satisfy a criterion
	accountsChainIDsCombinations := make(map[gethcommon.Address]map[uint64]bool)

	for _, tokenPermission := range permissions {

		permissionRequirementsMet := true
		response.Permissions[tokenPermission.Id] = &PermissionTokenCriteriaResult{Role: tokenPermission.Type}

		// There can be multiple token requirements per permission.
		// If only one is not met, the entire permission is marked
		// as not fulfilled
		for _, tokenRequirement := range tokenPermission.TokenCriteria {

			tokenRequirementMet := false
			tokenRequirementResponse := TokenRequirementResponse{TokenCriteria: tokenRequirement}

			if tokenRequirement.Type == protobuf.CommunityTokenType_ERC721 {
				if len(ownedERC721Tokens) == 0 {

					// No collectibles fetched at all: record this criterion
					// as unmet and move on to the next one.
					response.Permissions[tokenPermission.Id].TokenRequirements = append(response.Permissions[tokenPermission.Id].TokenRequirements, tokenRequirementResponse)
					response.Permissions[tokenPermission.Id].Criteria = append(response.Permissions[tokenPermission.Id].Criteria, false)
					continue
				}

			chainIDLoopERC721:
				for chainID, addressStr := range tokenRequirement.ContractAddresses {
					contractAddress := gethcommon.HexToAddress(addressStr)
					if _, exists := ownedERC721Tokens[chainID]; !exists || len(ownedERC721Tokens[chainID]) == 0 {
						continue chainIDLoopERC721
					}

					for account := range ownedERC721Tokens[chainID] {
						if _, exists := ownedERC721Tokens[chainID][account]; !exists {
							continue
						}

						tokenBalances := ownedERC721Tokens[chainID][account][contractAddress]
						if len(tokenBalances) > 0 {
							// 'account' owns some TokenID owned from contract 'address'
							if _, exists := accountsChainIDsCombinations[account]; !exists {
								accountsChainIDsCombinations[account] = make(map[uint64]bool)
							}

							if len(tokenRequirement.TokenIds) == 0 {
								// no specific tokenId of this collection is needed
								tokenRequirementMet = true
								accountsChainIDsCombinations[account][chainID] = true
								break chainIDLoopERC721
							}

							// Specific token IDs required: the account must
							// hold one of them with a positive balance.
						tokenIDsLoop:
							for _, tokenID := range tokenRequirement.TokenIds {
								tokenIDBigInt := new(big.Int).SetUint64(tokenID)

								for _, asset := range tokenBalances {
									if asset.TokenID.Cmp(tokenIDBigInt) == 0 && asset.Balance.Sign() > 0 {
										tokenRequirementMet = true
										accountsChainIDsCombinations[account][chainID] = true
										break tokenIDsLoop
									}
								}
							}
						}
					}
				}
			} else if tokenRequirement.Type == protobuf.CommunityTokenType_ERC20 {
				if len(ownedERC20TokenBalances) == 0 {
					// No ERC-20 balances fetched: record the criterion as
					// unmet and continue with the next one.
					response.Permissions[tokenPermission.Id].TokenRequirements = append(response.Permissions[tokenPermission.Id].TokenRequirements, tokenRequirementResponse)
					response.Permissions[tokenPermission.Id].Criteria = append(response.Permissions[tokenPermission.Id].Criteria, false)
					continue
				}

				// Balances for this token are summed across all accounts and
				// chains; the requirement is met once the running total
				// reaches the required amount.
				accumulatedBalance := new(big.Float)

			chainIDLoopERC20:
				for chainID, address := range tokenRequirement.ContractAddresses {
					if _, exists := ownedERC20TokenBalances[chainID]; !exists || len(ownedERC20TokenBalances[chainID]) == 0 {
						continue chainIDLoopERC20
					}
					contractAddress := gethcommon.HexToAddress(address)
					for account := range ownedERC20TokenBalances[chainID] {
						if _, exists := ownedERC20TokenBalances[chainID][account][contractAddress]; !exists {
							continue
						}

						value := ownedERC20TokenBalances[chainID][account][contractAddress]

						// Scale the raw integer balance down by the token's decimals.
						accountChainBalance := new(big.Float).Quo(
							new(big.Float).SetInt(value.ToInt()),
							big.NewFloat(math.Pow(10, float64(tokenRequirement.Decimals))),
						)

						if _, exists := accountsChainIDsCombinations[account]; !exists {
							accountsChainIDsCombinations[account] = make(map[uint64]bool)
						}

						if accountChainBalance.Cmp(big.NewFloat(0)) > 0 {
							// account has balance > 0 on this chain for this token, so let's add it the chain IDs
							accountsChainIDsCombinations[account][chainID] = true
						}

						// check if adding current chain account balance to accumulated balance
						// satisfies required amount
						prevBalance := accumulatedBalance
						accumulatedBalance.Add(prevBalance, accountChainBalance)

						// NOTE(review): bitSize 32 limits the parsed amount to
						// float32 precision (~7 significant digits); confirm
						// whether 64 was intended.
						requiredAmount, err := strconv.ParseFloat(tokenRequirement.Amount, 32)
						if err != nil {
							return nil, err
						}

						// != -1 means accumulatedBalance >= requiredAmount.
						if accumulatedBalance.Cmp(big.NewFloat(requiredAmount)) != -1 {
							tokenRequirementMet = true
							if shortcircuit {
								break chainIDLoopERC20
							}
						}
					}
				}

			} else if tokenRequirement.Type == protobuf.CommunityTokenType_ENS {

				for _, account := range accounts {
					ownedENSNames, err := p.getOwnedENS([]gethcommon.Address{account})
					if err != nil {
						return nil, err
					}

					if _, exists := accountsChainIDsCombinations[account]; !exists {
						accountsChainIDsCombinations[account] = make(map[uint64]bool)
					}

					if !strings.HasPrefix(tokenRequirement.EnsPattern, "*.") {
						// Exact ENS name required.
						for _, ownedENS := range ownedENSNames {
							if ownedENS == tokenRequirement.EnsPattern {
								tokenRequirementMet = true
								accountsChainIDsCombinations[account][walletcommon.EthereumMainnet] = true
							}
						}
					} else {
						// Wildcard pattern "*.parent": any owned name ending
						// in the parent name matches.
						parentName := tokenRequirement.EnsPattern[2:]
						for _, ownedENS := range ownedENSNames {
							if strings.HasSuffix(ownedENS, parentName) {
								tokenRequirementMet = true
								accountsChainIDsCombinations[account][walletcommon.EthereumMainnet] = true
							}
						}
					}
				}
			}
			if !tokenRequirementMet {
				permissionRequirementsMet = false
			}

			tokenRequirementResponse.Satisfied = tokenRequirementMet
			response.Permissions[tokenPermission.Id].TokenRequirements = append(response.Permissions[tokenPermission.Id].TokenRequirements, tokenRequirementResponse)
			response.Permissions[tokenPermission.Id].Criteria = append(response.Permissions[tokenPermission.Id].Criteria, tokenRequirementMet)
		}
		// multiple permissions are treated as logical OR, meaning
		// if only one of them is fulfilled, the user gets permission
		// to join and we can stop early
		if shortcircuit && permissionRequirementsMet {
			break
		}
	}

	// attach valid account and chainID combinations to response
	for account, chainIDs := range accountsChainIDsCombinations {
		combination := &AccountChainIDsCombination{
			Address: account,
		}
		for chainID := range chainIDs {
			combination.ChainIDs = append(combination.ChainIDs, chainID)
		}
		response.ValidCombinations = append(response.ValidCombinations, combination)
	}

	response.calculateSatisfied()

	return response, nil
}
|
||||
284
vendor/github.com/status-im/status-go/protocol/communities/permissioned_balances.go
generated
vendored
Normal file
284
vendor/github.com/status-im/status-go/protocol/communities/permissioned_balances.go
generated
vendored
Normal file
@@ -0,0 +1,284 @@
|
||||
package communities
|
||||
|
||||
import (
|
||||
"context"
|
||||
"math/big"
|
||||
"strconv"
|
||||
|
||||
"github.com/pkg/errors"
|
||||
|
||||
gethcommon "github.com/ethereum/go-ethereum/common"
|
||||
"github.com/status-im/status-go/eth-node/types"
|
||||
"github.com/status-im/status-go/protocol/protobuf"
|
||||
"github.com/status-im/status-go/services/wallet/bigint"
|
||||
"github.com/status-im/status-go/services/wallet/thirdparty"
|
||||
)
|
||||
|
||||
// PermissionedBalance is an aggregated token balance (ERC20 or ERC721) that
// is relevant to a community's role token permissions. For ERC721 tokens the
// Amount field is used as a 0/1 ownership flag rather than a real sum.
type PermissionedBalance struct {
	Type     protobuf.CommunityTokenType `json:"type"`
	Symbol   string                      `json:"symbol"`
	Name     string                      `json:"name"`
	Amount   *bigint.BigInt              `json:"amount"`
	Decimals uint64                      `json:"decimals"`
}
|
||||
|
||||
func calculatePermissionedBalancesERC20(
|
||||
accountAddresses []gethcommon.Address,
|
||||
balances BalancesByChain,
|
||||
tokenPermissions []*CommunityTokenPermission,
|
||||
) map[gethcommon.Address]map[string]*PermissionedBalance {
|
||||
res := make(map[gethcommon.Address]map[string]*PermissionedBalance)
|
||||
|
||||
// Set with composite key (chain ID + wallet address + contract address) to
|
||||
// store if we already processed the balance.
|
||||
usedBalances := make(map[string]bool)
|
||||
|
||||
for _, permission := range tokenPermissions {
|
||||
for _, criteria := range permission.TokenCriteria {
|
||||
if criteria.Type != protobuf.CommunityTokenType_ERC20 {
|
||||
continue
|
||||
}
|
||||
|
||||
for _, accountAddress := range accountAddresses {
|
||||
for chainID, hexContractAddress := range criteria.ContractAddresses {
|
||||
usedKey := strconv.FormatUint(chainID, 10) + "-" + accountAddress.Hex() + "-" + hexContractAddress
|
||||
|
||||
if _, ok := balances[chainID]; !ok {
|
||||
continue
|
||||
}
|
||||
if _, ok := balances[chainID][accountAddress]; !ok {
|
||||
continue
|
||||
}
|
||||
|
||||
contractAddress := gethcommon.HexToAddress(hexContractAddress)
|
||||
value, ok := balances[chainID][accountAddress][contractAddress]
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
|
||||
// Skip the contract address if it has been used already in the sum.
|
||||
if _, ok := usedBalances[usedKey]; ok {
|
||||
continue
|
||||
}
|
||||
|
||||
if _, ok := res[accountAddress]; !ok {
|
||||
res[accountAddress] = make(map[string]*PermissionedBalance, 0)
|
||||
}
|
||||
if _, ok := res[accountAddress][criteria.Symbol]; !ok {
|
||||
res[accountAddress][criteria.Symbol] = &PermissionedBalance{
|
||||
Type: criteria.Type,
|
||||
Symbol: criteria.Symbol,
|
||||
Name: criteria.Name,
|
||||
Decimals: criteria.Decimals,
|
||||
Amount: &bigint.BigInt{Int: big.NewInt(0)},
|
||||
}
|
||||
}
|
||||
|
||||
res[accountAddress][criteria.Symbol].Amount.Add(
|
||||
res[accountAddress][criteria.Symbol].Amount.Int,
|
||||
value.ToInt(),
|
||||
)
|
||||
usedBalances[usedKey] = true
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return res
|
||||
}
|
||||
|
||||
func isERC721CriteriaSatisfied(tokenBalances []thirdparty.TokenBalance, criteria *protobuf.TokenCriteria) bool {
|
||||
// No token IDs to compare against, so the criteria is satisfied.
|
||||
if len(criteria.TokenIds) == 0 {
|
||||
return true
|
||||
}
|
||||
|
||||
for _, tokenID := range criteria.TokenIds {
|
||||
tokenIDBigInt := new(big.Int).SetUint64(tokenID)
|
||||
for _, asset := range tokenBalances {
|
||||
if asset.TokenID.Cmp(tokenIDBigInt) == 0 && asset.Balance.Sign() > 0 {
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
// calculatePermissionedBalancesERC721 records, per account and token symbol,
// whether the account's collectibles satisfy the ERC721 token criteria of the
// given permissions. Amounts are 0/1 ownership flags, not real sums.
func (m *Manager) calculatePermissionedBalancesERC721(
	accountAddresses []gethcommon.Address,
	balances CollectiblesByChain,
	tokenPermissions []*CommunityTokenPermission,
) map[gethcommon.Address]map[string]*PermissionedBalance {
	res := make(map[gethcommon.Address]map[string]*PermissionedBalance)

	// Set with composite key (chain ID + wallet address + contract address) to
	// store if we already processed the balance.
	usedBalances := make(map[string]bool)

	for _, permission := range tokenPermissions {
		for _, criteria := range permission.TokenCriteria {
			// Only ERC721 criteria are considered here.
			if criteria.Type != protobuf.CommunityTokenType_ERC721 {
				continue
			}

			for _, accountAddress := range accountAddresses {
				for chainID, hexContractAddress := range criteria.ContractAddresses {
					usedKey := strconv.FormatUint(chainID, 10) + "-" + accountAddress.Hex() + "-" + hexContractAddress

					// Missing chain/account entries contribute nothing.
					if _, ok := balances[chainID]; !ok {
						continue
					}
					if _, ok := balances[chainID][accountAddress]; !ok {
						continue
					}

					contractAddress := gethcommon.HexToAddress(hexContractAddress)
					tokenBalances, ok := balances[chainID][accountAddress][contractAddress]
					if !ok || len(tokenBalances) == 0 {
						continue
					}

					// Skip the contract address if it has been used already in the sum.
					if _, ok := usedBalances[usedKey]; ok {
						continue
					}

					// Unlike the ERC20 variant, the combination is marked used even
					// when the criteria below is not satisfied.
					usedBalances[usedKey] = true

					if _, ok := res[accountAddress]; !ok {
						res[accountAddress] = make(map[string]*PermissionedBalance, 0)
					}
					if _, ok := res[accountAddress][criteria.Symbol]; !ok {
						res[accountAddress][criteria.Symbol] = &PermissionedBalance{
							Type:     criteria.Type,
							Symbol:   criteria.Symbol,
							Name:     criteria.Name,
							Decimals: criteria.Decimals,
							Amount:   &bigint.BigInt{Int: big.NewInt(0)},
						}
					}

					if isERC721CriteriaSatisfied(tokenBalances, criteria) {
						// We don't care about summing balances, thus setting as 1 is
						// sufficient.
						res[accountAddress][criteria.Symbol].Amount = &bigint.BigInt{Int: big.NewInt(1)}
					}
				}
			}
		}
	}

	return res
}
|
||||
|
||||
func (m *Manager) calculatePermissionedBalances(
|
||||
chainIDs []uint64,
|
||||
accountAddresses []gethcommon.Address,
|
||||
erc20Balances BalancesByChain,
|
||||
erc721Balances CollectiblesByChain,
|
||||
tokenPermissions []*CommunityTokenPermission,
|
||||
) map[gethcommon.Address][]PermissionedBalance {
|
||||
res := make(map[gethcommon.Address][]PermissionedBalance, 0)
|
||||
|
||||
aggregatedERC721Balances := m.calculatePermissionedBalancesERC721(accountAddresses, erc721Balances, tokenPermissions)
|
||||
for accountAddress, tokens := range aggregatedERC721Balances {
|
||||
for _, permissionedToken := range tokens {
|
||||
if permissionedToken.Amount.Sign() > 0 {
|
||||
res[accountAddress] = append(res[accountAddress], *permissionedToken)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
aggregatedERC20Balances := calculatePermissionedBalancesERC20(accountAddresses, erc20Balances, tokenPermissions)
|
||||
for accountAddress, tokens := range aggregatedERC20Balances {
|
||||
for _, permissionedToken := range tokens {
|
||||
if permissionedToken.Amount.Sign() > 0 {
|
||||
res[accountAddress] = append(res[accountAddress], *permissionedToken)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return res
|
||||
}
|
||||
|
||||
func keepRoleTokenPermissions(tokenPermissions map[string]*CommunityTokenPermission) []*CommunityTokenPermission {
|
||||
res := make([]*CommunityTokenPermission, 0)
|
||||
for _, p := range tokenPermissions {
|
||||
if p.Type == protobuf.CommunityTokenPermission_BECOME_MEMBER ||
|
||||
p.Type == protobuf.CommunityTokenPermission_BECOME_ADMIN ||
|
||||
p.Type == protobuf.CommunityTokenPermission_BECOME_TOKEN_MASTER ||
|
||||
p.Type == protobuf.CommunityTokenPermission_BECOME_TOKEN_OWNER {
|
||||
res = append(res, p)
|
||||
}
|
||||
}
|
||||
return res
|
||||
}
|
||||
|
||||
// GetPermissionedBalances returns balances indexed by account address.
|
||||
//
|
||||
// It assumes balances in different chains with the same symbol can be summed.
|
||||
// It also assumes the criteria's decimals field is the same across different
|
||||
// criteria when they refer to the same asset (by symbol).
|
||||
// GetPermissionedBalances returns balances indexed by account address.
//
// It assumes balances in different chains with the same symbol can be summed.
// It also assumes the criteria's decimals field is the same across different
// criteria when they refer to the same asset (by symbol).
func (m *Manager) GetPermissionedBalances(
	ctx context.Context,
	communityID types.HexBytes,
	accountAddresses []gethcommon.Address,
) (map[gethcommon.Address][]PermissionedBalance, error) {
	community, err := m.GetByID(communityID)
	if err != nil {
		return nil, err
	}
	if community == nil {
		return nil, errors.Errorf("community does not exist ID='%s'", communityID)
	}

	// Only permissions that grant a community role are relevant here.
	tokenPermissions := keepRoleTokenPermissions(community.TokenPermissions())

	allChainIDs, err := m.tokenManager.GetAllChainIDs()
	if err != nil {
		return nil, err
	}
	accountsAndChainIDs := combineAddressesAndChainIDs(accountAddresses, allChainIDs)

	erc20TokenCriteriaByChain, erc721TokenCriteriaByChain, _ := ExtractTokenCriteria(tokenPermissions)

	accounts := make([]gethcommon.Address, 0, len(accountsAndChainIDs))
	for _, accountAndChainIDs := range accountsAndChainIDs {
		accounts = append(accounts, accountAndChainIDs.Address)
	}

	// Chains and contract addresses referenced by ERC20 criteria.
	erc20ChainIDsSet := make(map[uint64]bool)
	erc20TokenAddresses := make([]gethcommon.Address, 0)
	for chainID, criterionByContractAddress := range erc20TokenCriteriaByChain {
		erc20ChainIDsSet[chainID] = true
		for contractAddress := range criterionByContractAddress {
			erc20TokenAddresses = append(erc20TokenAddresses, gethcommon.HexToAddress(contractAddress))
		}
	}

	// Chains referenced by ERC721 criteria.
	erc721ChainIDsSet := make(map[uint64]bool)
	for chainID := range erc721TokenCriteriaByChain {
		erc721ChainIDsSet[chainID] = true
	}

	erc20ChainIDs := calculateChainIDsSet(accountsAndChainIDs, erc20ChainIDsSet)
	erc721ChainIDs := calculateChainIDsSet(accountsAndChainIDs, erc721ChainIDsSet)

	erc20Balances, err := m.tokenManager.GetBalancesByChain(ctx, accounts, erc20TokenAddresses, erc20ChainIDs)
	if err != nil {
		return nil, err
	}

	// Collectibles are only fetched when at least one ERC721 chain is in play.
	erc721Balances := make(CollectiblesByChain)
	if len(erc721ChainIDs) > 0 {
		balances, err := m.GetOwnedERC721Tokens(accounts, erc721TokenCriteriaByChain, erc721ChainIDs)
		if err != nil {
			return nil, err
		}

		erc721Balances = balances
	}

	return m.calculatePermissionedBalances(allChainIDs, accountAddresses, erc20Balances, erc721Balances, tokenPermissions), nil
}
|
||||
1783
vendor/github.com/status-im/status-go/protocol/communities/persistence.go
generated
vendored
Normal file
1783
vendor/github.com/status-im/status-go/protocol/communities/persistence.go
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
163
vendor/github.com/status-im/status-go/protocol/communities/persistence_mapping.go
generated
vendored
Normal file
163
vendor/github.com/status-im/status-go/protocol/communities/persistence_mapping.go
generated
vendored
Normal file
@@ -0,0 +1,163 @@
|
||||
package communities
|
||||
|
||||
import (
|
||||
"crypto/ecdsa"
|
||||
|
||||
"go.uber.org/zap"
|
||||
|
||||
"github.com/status-im/status-go/eth-node/crypto"
|
||||
"github.com/status-im/status-go/protocol/common"
|
||||
"github.com/status-im/status-go/protocol/common/shard"
|
||||
)
|
||||
|
||||
// communityToRecord maps a Community into its database record representation,
// serializing the description as wrapped protocol message bytes.
func communityToRecord(community *Community) (*CommunityRecord, error) {
	wrappedDescription, err := community.ToProtocolMessageBytes()
	if err != nil {
		return nil, err
	}

	// Shard columns are nullable: only set when the community is sharded.
	var shardIndex, shardCluster *uint
	if community.Shard() != nil {
		index := uint(community.Shard().Index)
		shardIndex = &index
		cluster := uint(community.Shard().Cluster)
		shardCluster = &cluster
	}

	return &CommunityRecord{
		id:           community.ID(),
		privateKey:   crypto.FromECDSA(community.PrivateKey()),
		controlNode:  crypto.FromECDSAPub(community.ControlNode()),
		description:  wrappedDescription,
		joined:       community.config.Joined,
		joinedAt:     community.config.JoinedAt,
		lastOpenedAt: community.config.LastOpenedAt,
		verified:     community.config.Verified,
		spectated:    community.config.Spectated,
		muted:        community.config.Muted,
		mutedTill:    community.config.MuteTill,
		shardCluster: shardCluster,
		shardIndex:   shardIndex,
	}, nil
}
|
||||
|
||||
func communityToEventsRecord(community *Community) (*EventsRecord, error) {
|
||||
if community.config.EventsData == nil {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
rawEvents, err := communityEventsToJSONEncodedBytes(community.config.EventsData.Events)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &EventsRecord{
|
||||
id: community.ID(),
|
||||
rawEvents: rawEvents,
|
||||
rawDescription: community.config.EventsData.EventsBaseCommunityDescription,
|
||||
}, nil
|
||||
}
|
||||
|
||||
// recordToRequestToJoin maps a database record into a RequestToJoin.
//
// FIXME: fill revealed addresses
func recordToRequestToJoin(r *RequestToJoinRecord) *RequestToJoin {
	return &RequestToJoin{
		ID:          r.id,
		PublicKey:   r.publicKey,
		Clock:       uint64(r.clock),
		ENSName:     r.ensName,
		ChatID:      r.chatID,
		CommunityID: r.communityID,
		State:       RequestToJoinState(r.state),
	}
}
|
||||
|
||||
// recordBundleToCommunity reconstructs a Community from its persisted record
// bundle: the community row plus optional events data and our own request to
// join. The optional initializer runs on the built Community before return.
func recordBundleToCommunity(r *CommunityRecordBundle, memberIdentity *ecdsa.PublicKey, installationID string,
	logger *zap.Logger, timesource common.TimeSource, encryptor DescriptionEncryptor, initializer func(*Community) error) (*Community, error) {
	var privateKey *ecdsa.PrivateKey
	var controlNode *ecdsa.PublicKey
	var err error

	// Private key and control node columns are nullable.
	if r.community.privateKey != nil {
		privateKey, err = crypto.ToECDSA(r.community.privateKey)
		if err != nil {
			return nil, err
		}
	}
	if r.community.controlNode != nil {
		controlNode, err = crypto.UnmarshalPubkey(r.community.controlNode)
		if err != nil {
			return nil, err
		}
	}

	description, err := decodeWrappedCommunityDescription(r.community.description)
	if err != nil {
		return nil, err
	}

	// The record ID is the compressed community public key.
	id, err := crypto.DecompressPubkey(r.community.id)
	if err != nil {
		return nil, err
	}

	var eventsData *EventsData
	if r.events != nil {
		eventsData, err = decodeEventsData(r.events.rawEvents, r.events.rawDescription)
		if err != nil {
			return nil, err
		}
	}

	// Both shard columns must be present for the community to be sharded.
	var s *shard.Shard = nil
	if r.community.shardCluster != nil && r.community.shardIndex != nil {
		s = &shard.Shard{
			Cluster: uint16(*r.community.shardCluster),
			Index:   uint16(*r.community.shardIndex),
		}
	}

	// This device controls the community when the stored installation ID
	// matches ours.
	isControlDevice := r.installationID != nil && *r.installationID == installationID

	config := Config{
		PrivateKey:                          privateKey,
		ControlNode:                         controlNode,
		ControlDevice:                       isControlDevice,
		CommunityDescription:                description,
		MemberIdentity:                      memberIdentity,
		CommunityDescriptionProtocolMessage: r.community.description,
		Logger:                              logger,
		ID:                                  id,
		Verified:                            r.community.verified,
		Muted:                               r.community.muted,
		MuteTill:                            r.community.mutedTill,
		Joined:                              r.community.joined,
		JoinedAt:                            r.community.joinedAt,
		LastOpenedAt:                        r.community.lastOpenedAt,
		Spectated:                           r.community.spectated,
		EventsData:                          eventsData,
		Shard:                               s,
	}

	community, err := New(config, timesource, encryptor)
	if err != nil {
		return nil, err
	}

	// Restore our own request to join, if any was persisted.
	if r.requestToJoin != nil {
		community.config.RequestedToJoinAt = uint64(r.requestToJoin.clock)
		requestToJoin := recordToRequestToJoin(r.requestToJoin)
		if !requestToJoin.Empty() {
			community.AddRequestToJoin(requestToJoin)
		}
	}

	if initializer != nil {
		err = initializer(community)
		if err != nil {
			return nil, err
		}
	}

	return community, nil
}
|
||||
132
vendor/github.com/status-im/status-go/protocol/communities/persistence_test_helpers.go
generated
vendored
Normal file
132
vendor/github.com/status-im/status-go/protocol/communities/persistence_test_helpers.go
generated
vendored
Normal file
@@ -0,0 +1,132 @@
|
||||
package communities
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
|
||||
"github.com/status-im/status-go/protocol/protobuf"
|
||||
)
|
||||
|
||||
// RawCommunityRow mirrors a communities_communities table row as scanned by
// the raw query helpers in this file.
type RawCommunityRow struct {
	ID           []byte
	PrivateKey   []byte
	Description  []byte
	Joined       bool
	JoinedAt     int64
	Spectated    bool
	Verified     bool
	SyncedAt     uint64 // Unix seconds; 0 when the synced_at column is NULL
	Muted        bool
	LastOpenedAt int64
}
|
||||
|
||||
// fromSyncCommunityProtobuf maps a SyncInstallationCommunity message into a
// raw community row; the message clock becomes the SyncedAt value.
func fromSyncCommunityProtobuf(syncCommProto *protobuf.SyncInstallationCommunity) RawCommunityRow {
	return RawCommunityRow{
		ID:           syncCommProto.Id,
		Description:  syncCommProto.Description,
		Joined:       syncCommProto.Joined,
		JoinedAt:     syncCommProto.JoinedAt,
		Spectated:    syncCommProto.Spectated,
		Verified:     syncCommProto.Verified,
		SyncedAt:     syncCommProto.Clock,
		Muted:        syncCommProto.Muted,
		LastOpenedAt: syncCommProto.LastOpenedAt,
	}
}
|
||||
|
||||
func (p *Persistence) scanRowToStruct(rowScan func(dest ...interface{}) error) (*RawCommunityRow, error) {
|
||||
rcr := new(RawCommunityRow)
|
||||
var syncedAt, muteTill sql.NullTime
|
||||
|
||||
err := rowScan(
|
||||
&rcr.ID,
|
||||
&rcr.PrivateKey,
|
||||
&rcr.Description,
|
||||
&rcr.Joined,
|
||||
&rcr.JoinedAt,
|
||||
&rcr.Verified,
|
||||
&rcr.Spectated,
|
||||
&rcr.Muted,
|
||||
&muteTill,
|
||||
&syncedAt,
|
||||
&rcr.LastOpenedAt,
|
||||
)
|
||||
if syncedAt.Valid {
|
||||
rcr.SyncedAt = uint64(syncedAt.Time.Unix())
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return rcr, nil
|
||||
}
|
||||
|
||||
func (p *Persistence) getAllCommunitiesRaw() (rcrs []*RawCommunityRow, err error) {
|
||||
var rows *sql.Rows
|
||||
// Keep "*", if the db table is updated, syncing needs to match, this fail will force us to update syncing.
|
||||
rows, err = p.db.Query(`SELECT id, private_key, description, joined, joined_at, verified, spectated, muted, muted_till, synced_at, last_opened_at FROM communities_communities`)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
defer func() {
|
||||
if err != nil {
|
||||
// Don't shadow original error
|
||||
_ = rows.Close()
|
||||
return
|
||||
|
||||
}
|
||||
err = rows.Close()
|
||||
}()
|
||||
|
||||
for rows.Next() {
|
||||
rcr, err := p.scanRowToStruct(rows.Scan)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
rcrs = append(rcrs, rcr)
|
||||
}
|
||||
return rcrs, nil
|
||||
}
|
||||
|
||||
func (p *Persistence) getRawCommunityRow(id []byte) (*RawCommunityRow, error) {
|
||||
qr := p.db.QueryRow(`SELECT id, private_key, description, joined, joined_at, verified, spectated, muted, muted_till, synced_at, last_opened_at FROM communities_communities WHERE id = ?`, id)
|
||||
return p.scanRowToStruct(qr.Scan)
|
||||
}
|
||||
|
||||
// getSyncedRawCommunity loads a raw community row by ID, but only when it has
// already been synced (synced_at > 0).
func (p *Persistence) getSyncedRawCommunity(id []byte) (*RawCommunityRow, error) {
	qr := p.db.QueryRow(`SELECT id, private_key, description, joined, joined_at, verified, spectated, muted, muted_till, synced_at, last_opened_at FROM communities_communities WHERE id = ? AND synced_at > 0`, id)
	return p.scanRowToStruct(qr.Scan)
}
|
||||
|
||||
// saveRawCommunityRow inserts a raw community row including its synced_at
// value.
//
// NOTE(review): the Spectated field and the muted_till column are not written
// here — confirm this is intentional for these helpers.
func (p *Persistence) saveRawCommunityRow(rawCommRow RawCommunityRow) error {
	_, err := p.db.Exec(
		`INSERT INTO communities_communities ("id", "private_key", "description", "joined", "joined_at", "verified", "synced_at", "muted", "last_opened_at") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)`,
		rawCommRow.ID,
		rawCommRow.PrivateKey,
		rawCommRow.Description,
		rawCommRow.Joined,
		rawCommRow.JoinedAt,
		rawCommRow.Verified,
		rawCommRow.SyncedAt,
		rawCommRow.Muted,
		rawCommRow.LastOpenedAt,
	)
	return err
}
|
||||
|
||||
// saveRawCommunityRowWithoutSyncedAt inserts a raw community row, leaving the
// synced_at column at its database default.
func (p *Persistence) saveRawCommunityRowWithoutSyncedAt(rawCommRow RawCommunityRow) error {
	_, err := p.db.Exec(
		`INSERT INTO communities_communities ("id", "private_key", "description", "joined", "joined_at", "verified", "muted", "last_opened_at") VALUES (?, ?, ?, ?, ?, ?, ?, ?)`,
		rawCommRow.ID,
		rawCommRow.PrivateKey,
		rawCommRow.Description,
		rawCommRow.Joined,
		rawCommRow.JoinedAt,
		rawCommRow.Verified,
		rawCommRow.Muted,
		rawCommRow.LastOpenedAt,
	)
	return err
}
|
||||
101
vendor/github.com/status-im/status-go/protocol/communities/request_to_join.go
generated
vendored
Normal file
101
vendor/github.com/status-im/status-go/protocol/communities/request_to_join.go
generated
vendored
Normal file
@@ -0,0 +1,101 @@
|
||||
package communities
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strconv"
|
||||
"time"
|
||||
|
||||
"github.com/status-im/status-go/eth-node/types"
|
||||
"github.com/status-im/status-go/protocol/protobuf"
|
||||
)
|
||||
|
||||
// RequestToJoinState is the lifecycle state of a community join request.
// Values start at 1 so the zero value means "no state".
type RequestToJoinState uint

const (
	RequestToJoinStatePending RequestToJoinState = iota + 1
	RequestToJoinStateDeclined
	RequestToJoinStateAccepted
	RequestToJoinStateCanceled
	RequestToJoinStateAcceptedPending
	RequestToJoinStateDeclinedPending
	RequestToJoinStateAwaitingAddresses
)
|
||||
|
||||
// RequestToJoin is a user's request to become a member of a community.
type RequestToJoin struct {
	ID               types.HexBytes              `json:"id"`
	PublicKey        string                      `json:"publicKey"`
	Clock            uint64                      `json:"clock"`
	ENSName          string                      `json:"ensName,omitempty"`
	ChatID           string                      `json:"chatId"`
	CommunityID      types.HexBytes              `json:"communityId"`
	State            RequestToJoinState          `json:"state"`
	Our              bool                        `json:"our"`
	Deleted          bool                        `json:"deleted"`
	RevealedAccounts []*protobuf.RevealedAccount `json:"revealedAccounts,omitempty"`
}
|
||||
|
||||
// CalculateID computes and stores the deterministic request ID derived from
// the requester's public key and the community ID.
func (r *RequestToJoin) CalculateID() {
	r.ID = CalculateRequestID(r.PublicKey, r.CommunityID)
}
|
||||
|
||||
// ToCommunityRequestToJoinProtobuf converts the request into the protobuf
// message sent to the community.
func (r *RequestToJoin) ToCommunityRequestToJoinProtobuf() *protobuf.CommunityRequestToJoin {
	return &protobuf.CommunityRequestToJoin{
		Clock:            r.Clock,
		EnsName:          r.ENSName,
		CommunityId:      r.CommunityID,
		RevealedAccounts: r.RevealedAccounts,
	}
}
|
||||
|
||||
// ToSyncProtobuf converts the request into the protobuf message used for
// syncing it between our own devices.
func (r *RequestToJoin) ToSyncProtobuf() *protobuf.SyncCommunityRequestsToJoin {
	return &protobuf.SyncCommunityRequestsToJoin{
		Id:               r.ID,
		PublicKey:        r.PublicKey,
		Clock:            r.Clock,
		EnsName:          r.ENSName,
		ChatId:           r.ChatID,
		CommunityId:      r.CommunityID,
		State:            uint64(r.State),
		RevealedAccounts: r.RevealedAccounts,
	}
}
|
||||
|
||||
// InitFromSyncProtobuf populates the request from a sync protobuf message;
// it is the inverse of ToSyncProtobuf.
func (r *RequestToJoin) InitFromSyncProtobuf(proto *protobuf.SyncCommunityRequestsToJoin) {
	r.ID = proto.Id
	r.PublicKey = proto.PublicKey
	r.Clock = proto.Clock
	r.ENSName = proto.EnsName
	r.ChatID = proto.ChatId
	r.CommunityID = proto.CommunityId
	r.State = RequestToJoinState(proto.State)
	r.RevealedAccounts = proto.RevealedAccounts
}
|
||||
|
||||
func (r *RequestToJoin) Empty() bool {
|
||||
return len(r.ID)+len(r.PublicKey)+int(r.Clock)+len(r.ENSName)+len(r.ChatID)+len(r.CommunityID)+int(r.State) == 0
|
||||
}
|
||||
|
||||
// ShouldRetainDeclined reports whether a declined request should still be
// kept at the given clock, i.e. whether its 7-day decline window has not yet
// expired. Non-declined requests always return false.
func (r *RequestToJoin) ShouldRetainDeclined(clock uint64) (bool, error) {
	if r.State != RequestToJoinStateDeclined {
		return false, nil
	}

	declineExpiryClock, err := AddTimeoutToRequestToJoinClock(r.Clock)
	if err != nil {
		return false, err
	}

	return clock < declineExpiryClock, nil
}
|
||||
|
||||
// AddTimeoutToRequestToJoinClock returns the clock value at which a request
// to join times out: the request clock (interpreted as Unix seconds) plus
// 7 days. An error is returned when the clock does not fit into an int64.
func AddTimeoutToRequestToJoinClock(clock uint64) (uint64, error) {
	// Round-trip through a decimal string (FormatUint avoids the fmt boxing
	// the previous code paid) so values above math.MaxInt64 yield a range
	// error instead of silently wrapping.
	requestToJoinClock, err := strconv.ParseInt(strconv.FormatUint(clock, 10), 10, 64)
	if err != nil {
		return 0, err
	}

	// Adding 7 days to the request clock
	requestTimeOutClock := uint64(time.Unix(requestToJoinClock, 0).AddDate(0, 0, 7).Unix())

	return requestTimeOutClock, nil
}
|
||||
22
vendor/github.com/status-im/status-go/protocol/communities/request_to_leave.go
generated
vendored
Normal file
22
vendor/github.com/status-im/status-go/protocol/communities/request_to_leave.go
generated
vendored
Normal file
@@ -0,0 +1,22 @@
|
||||
package communities
|
||||
|
||||
import (
|
||||
"github.com/status-im/status-go/eth-node/types"
|
||||
"github.com/status-im/status-go/protocol/protobuf"
|
||||
)
|
||||
|
||||
// RequestToLeave is a user's request to leave a community.
type RequestToLeave struct {
	ID          types.HexBytes `json:"id"`
	PublicKey   string         `json:"publicKey"`
	Clock       uint64         `json:"clock"`
	CommunityID types.HexBytes `json:"communityId"`
}
|
||||
|
||||
func NewRequestToLeave(publicKey string, protobuf *protobuf.CommunityRequestToLeave) *RequestToLeave {
|
||||
return &RequestToLeave{
|
||||
ID: CalculateRequestID(publicKey, protobuf.CommunityId),
|
||||
PublicKey: publicKey,
|
||||
Clock: protobuf.Clock,
|
||||
CommunityID: protobuf.CommunityId,
|
||||
}
|
||||
}
|
||||
120
vendor/github.com/status-im/status-go/protocol/communities/roles_authorization.go
generated
vendored
Normal file
120
vendor/github.com/status-im/status-go/protocol/communities/roles_authorization.go
generated
vendored
Normal file
@@ -0,0 +1,120 @@
|
||||
package communities
|
||||
|
||||
import (
|
||||
"golang.org/x/exp/slices"
|
||||
|
||||
"github.com/status-im/status-go/protocol/protobuf"
|
||||
)
|
||||
|
||||
// Event types an admin is allowed to produce.
var adminAuthorizedEventTypes = []protobuf.CommunityEvent_EventType{
	protobuf.CommunityEvent_COMMUNITY_EDIT,
	protobuf.CommunityEvent_COMMUNITY_MEMBER_TOKEN_PERMISSION_CHANGE,
	protobuf.CommunityEvent_COMMUNITY_MEMBER_TOKEN_PERMISSION_DELETE,
	protobuf.CommunityEvent_COMMUNITY_CATEGORY_CREATE,
	protobuf.CommunityEvent_COMMUNITY_CATEGORY_DELETE,
	protobuf.CommunityEvent_COMMUNITY_CATEGORY_EDIT,
	protobuf.CommunityEvent_COMMUNITY_CHANNEL_CREATE,
	protobuf.CommunityEvent_COMMUNITY_CHANNEL_DELETE,
	protobuf.CommunityEvent_COMMUNITY_CHANNEL_EDIT,
	protobuf.CommunityEvent_COMMUNITY_CATEGORY_REORDER,
	protobuf.CommunityEvent_COMMUNITY_CHANNEL_REORDER,
	protobuf.CommunityEvent_COMMUNITY_REQUEST_TO_JOIN_ACCEPT,
	protobuf.CommunityEvent_COMMUNITY_REQUEST_TO_JOIN_REJECT,
	protobuf.CommunityEvent_COMMUNITY_MEMBER_KICK,
	protobuf.CommunityEvent_COMMUNITY_MEMBER_BAN,
	protobuf.CommunityEvent_COMMUNITY_MEMBER_UNBAN,
}

// Token masters can do everything an admin can, plus add community tokens.
// (The append allocates a fresh backing array because the literal above is at
// full capacity, so adminAuthorizedEventTypes is not mutated.)
var tokenMasterAuthorizedEventTypes = append(adminAuthorizedEventTypes, []protobuf.CommunityEvent_EventType{
	protobuf.CommunityEvent_COMMUNITY_TOKEN_ADD,
}...)

// Owners currently share the token master event set.
var ownerAuthorizedEventTypes = tokenMasterAuthorizedEventTypes

// rolesToAuthorizedEventTypes maps each member role to the event types it may
// produce; ROLE_NONE may produce none.
var rolesToAuthorizedEventTypes = map[protobuf.CommunityMember_Roles][]protobuf.CommunityEvent_EventType{
	protobuf.CommunityMember_ROLE_NONE:         []protobuf.CommunityEvent_EventType{},
	protobuf.CommunityMember_ROLE_OWNER:        ownerAuthorizedEventTypes,
	protobuf.CommunityMember_ROLE_ADMIN:        adminAuthorizedEventTypes,
	protobuf.CommunityMember_ROLE_TOKEN_MASTER: tokenMasterAuthorizedEventTypes,
}

// Permission types an admin is allowed to create or modify.
var adminAuthorizedPermissionTypes = []protobuf.CommunityTokenPermission_Type{
	protobuf.CommunityTokenPermission_BECOME_MEMBER,
	protobuf.CommunityTokenPermission_CAN_VIEW_CHANNEL,
	protobuf.CommunityTokenPermission_CAN_VIEW_AND_POST_CHANNEL,
}

// Token masters currently share the admin permission set.
var tokenMasterAuthorizedPermissionTypes = append(adminAuthorizedPermissionTypes, []protobuf.CommunityTokenPermission_Type{}...)

// Owners can additionally manage admin and token-master role permissions.
var ownerAuthorizedPermissionTypes = append(tokenMasterAuthorizedPermissionTypes, []protobuf.CommunityTokenPermission_Type{
	protobuf.CommunityTokenPermission_BECOME_ADMIN,
	protobuf.CommunityTokenPermission_BECOME_TOKEN_MASTER,
}...)

// rolesToAuthorizedPermissionTypes maps each member role to the permission
// types it may create or modify; ROLE_NONE may modify none.
var rolesToAuthorizedPermissionTypes = map[protobuf.CommunityMember_Roles][]protobuf.CommunityTokenPermission_Type{
	protobuf.CommunityMember_ROLE_NONE:         []protobuf.CommunityTokenPermission_Type{},
	protobuf.CommunityMember_ROLE_OWNER:        ownerAuthorizedPermissionTypes,
	protobuf.CommunityMember_ROLE_ADMIN:        adminAuthorizedPermissionTypes,
	protobuf.CommunityMember_ROLE_TOKEN_MASTER: tokenMasterAuthorizedPermissionTypes,
}
|
||||
|
||||
func canRolesPerformEvent(roles []protobuf.CommunityMember_Roles, eventType protobuf.CommunityEvent_EventType) bool {
|
||||
for _, role := range roles {
|
||||
if slices.Contains(rolesToAuthorizedEventTypes[role], eventType) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func canRolesModifyPermission(roles []protobuf.CommunityMember_Roles, permissionType protobuf.CommunityTokenPermission_Type) bool {
|
||||
for _, role := range roles {
|
||||
if slices.Contains(rolesToAuthorizedPermissionTypes[role], permissionType) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// canRolesKickOrBanMember implements the kick/ban hierarchy between the
// sender's roles and the target member's roles:
// owner > token master > admin > regular member.
func canRolesKickOrBanMember(senderRoles []protobuf.CommunityMember_Roles, memberRoles []protobuf.CommunityMember_Roles) bool {
	// Owner can kick everyone
	if slices.Contains(senderRoles, protobuf.CommunityMember_ROLE_OWNER) {
		return true
	}

	// TokenMaster can kick normal members and admins
	if (slices.Contains(senderRoles, protobuf.CommunityMember_ROLE_TOKEN_MASTER)) &&
		!(slices.Contains(memberRoles, protobuf.CommunityMember_ROLE_TOKEN_MASTER) ||
			slices.Contains(memberRoles, protobuf.CommunityMember_ROLE_OWNER)) {
		return true
	}

	// Admins can kick normal members
	if (slices.Contains(senderRoles, protobuf.CommunityMember_ROLE_ADMIN)) &&
		!(slices.Contains(memberRoles, protobuf.CommunityMember_ROLE_ADMIN) ||
			slices.Contains(memberRoles, protobuf.CommunityMember_ROLE_TOKEN_MASTER) ||
			slices.Contains(memberRoles, protobuf.CommunityMember_ROLE_OWNER)) {
		return true
	}

	// Normal members can't kick anyone
	return false
}
|
||||
|
||||
// RolesAuthorizedToPerformEvent reports whether the sender's roles authorize
// the given community event. Permission-change and kick/ban events require
// additional checks against the permission type and the target member's
// roles, respectively.
func RolesAuthorizedToPerformEvent(senderRoles []protobuf.CommunityMember_Roles, memberRoles []protobuf.CommunityMember_Roles, event *CommunityEvent) bool {
	if !canRolesPerformEvent(senderRoles, event.Type) {
		return false
	}

	if event.Type == protobuf.CommunityEvent_COMMUNITY_MEMBER_TOKEN_PERMISSION_CHANGE ||
		event.Type == protobuf.CommunityEvent_COMMUNITY_MEMBER_TOKEN_PERMISSION_DELETE {
		return canRolesModifyPermission(senderRoles, event.TokenPermission.Type)
	}

	if event.Type == protobuf.CommunityEvent_COMMUNITY_MEMBER_BAN ||
		event.Type == protobuf.CommunityEvent_COMMUNITY_MEMBER_KICK {
		return canRolesKickOrBanMember(senderRoles, memberRoles)
	}

	return true
}
|
||||
41
vendor/github.com/status-im/status-go/protocol/communities/token/community_token.go
generated
vendored
Normal file
41
vendor/github.com/status-im/status-go/protocol/communities/token/community_token.go
generated
vendored
Normal file
@@ -0,0 +1,41 @@
|
||||
package token
|
||||
|
||||
import (
|
||||
"github.com/status-im/status-go/protocol/protobuf"
|
||||
"github.com/status-im/status-go/services/wallet/bigint"
|
||||
)
|
||||
|
||||
// DeployState tracks the lifecycle of a community token's contract deployment.
type DeployState uint8

const (
	Failed DeployState = iota
	InProgress
	Deployed
)

// PrivilegesLevel ranks the privileges a token grants its holder within a community.
type PrivilegesLevel uint8

const (
	OwnerLevel PrivilegesLevel = iota
	MasterLevel
	CommunityLevel
)

// CommunityToken describes a token associated with a community, including its
// on-chain identity (chain, address, supply) and its deployment status.
type CommunityToken struct {
	TokenType protobuf.CommunityTokenType `json:"tokenType"`
	// CommunityID is the identifier of the owning community.
	CommunityID string `json:"communityId"`
	// Address is the token contract address on ChainID.
	Address     string         `json:"address"`
	Name        string         `json:"name"`
	Symbol      string         `json:"symbol"`
	Description string         `json:"description"`
	Supply      *bigint.BigInt `json:"supply"`
	// InfiniteSupply indicates no fixed supply cap.
	InfiniteSupply bool `json:"infiniteSupply"`
	Transferable   bool `json:"transferable"`
	// RemoteSelfDestruct indicates the contract supports remote burn/destruct.
	RemoteSelfDestruct bool        `json:"remoteSelfDestruct"`
	ChainID            int         `json:"chainId"`
	DeployState        DeployState `json:"deployState"`
	// Base64Image is the token image; note the JSON key is "image".
	Base64Image     string          `json:"image"`
	Decimals        int             `json:"decimals"`
	Deployer        string          `json:"deployer"`
	PrivilegesLevel PrivilegesLevel `json:"privilegesLevel"`
}
|
||||
59
vendor/github.com/status-im/status-go/protocol/communities/utils.go
generated
vendored
Normal file
59
vendor/github.com/status-im/status-go/protocol/communities/utils.go
generated
vendored
Normal file
@@ -0,0 +1,59 @@
|
||||
package communities
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"github.com/status-im/status-go/eth-node/crypto"
|
||||
"github.com/status-im/status-go/eth-node/types"
|
||||
"github.com/status-im/status-go/protocol/protobuf"
|
||||
)
|
||||
|
||||
// CalculateRequestID derives a deterministic request identifier from the
// requester's public key and the community ID by hashing "<publicKey>-<communityID>"
// with Keccak-256.
func CalculateRequestID(publicKey string, communityID types.HexBytes) types.HexBytes {
	idString := fmt.Sprintf("%s-%s", publicKey, communityID)
	return crypto.Keccak256([]byte(idString))
}
|
||||
|
||||
func ExtractTokenCriteria(permissions []*CommunityTokenPermission) (erc20TokenCriteria map[uint64]map[string]*protobuf.TokenCriteria, erc721TokenCriteria map[uint64]map[string]*protobuf.TokenCriteria, ensTokenCriteria []string) {
|
||||
erc20TokenCriteria = make(map[uint64]map[string]*protobuf.TokenCriteria)
|
||||
erc721TokenCriteria = make(map[uint64]map[string]*protobuf.TokenCriteria)
|
||||
ensTokenCriteria = make([]string, 0)
|
||||
|
||||
for _, tokenPermission := range permissions {
|
||||
for _, tokenRequirement := range tokenPermission.TokenCriteria {
|
||||
|
||||
isERC721 := tokenRequirement.Type == protobuf.CommunityTokenType_ERC721
|
||||
isERC20 := tokenRequirement.Type == protobuf.CommunityTokenType_ERC20
|
||||
isENS := tokenRequirement.Type == protobuf.CommunityTokenType_ENS
|
||||
|
||||
for chainID, contractAddress := range tokenRequirement.ContractAddresses {
|
||||
|
||||
_, existsERC721 := erc721TokenCriteria[chainID]
|
||||
|
||||
if isERC721 && !existsERC721 {
|
||||
erc721TokenCriteria[chainID] = make(map[string]*protobuf.TokenCriteria)
|
||||
}
|
||||
_, existsERC20 := erc20TokenCriteria[chainID]
|
||||
|
||||
if isERC20 && !existsERC20 {
|
||||
erc20TokenCriteria[chainID] = make(map[string]*protobuf.TokenCriteria)
|
||||
}
|
||||
|
||||
_, existsERC721 = erc721TokenCriteria[chainID][contractAddress]
|
||||
if isERC721 && !existsERC721 {
|
||||
erc721TokenCriteria[chainID][strings.ToLower(contractAddress)] = tokenRequirement
|
||||
}
|
||||
|
||||
_, existsERC20 = erc20TokenCriteria[chainID][contractAddress]
|
||||
if isERC20 && !existsERC20 {
|
||||
erc20TokenCriteria[chainID][strings.ToLower(contractAddress)] = tokenRequirement
|
||||
}
|
||||
|
||||
if isENS {
|
||||
ensTokenCriteria = append(ensTokenCriteria, tokenRequirement.EnsPattern)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
83
vendor/github.com/status-im/status-go/protocol/communities/validator.go
generated
vendored
Normal file
83
vendor/github.com/status-im/status-go/protocol/communities/validator.go
generated
vendored
Normal file
@@ -0,0 +1,83 @@
|
||||
package communities
|
||||
|
||||
import (
|
||||
"github.com/status-im/status-go/protocol/protobuf"
|
||||
"github.com/status-im/status-go/protocol/requests"
|
||||
)
|
||||
|
||||
// validateCommunityChat checks a single chat of a community description:
// it must exist, carry permissions with a known access mode, reference only
// categories declared in the description, carry an identity, and list only
// members that are also members of the community itself.
func validateCommunityChat(desc *protobuf.CommunityDescription, chat *protobuf.CommunityChat) error {
	if chat == nil {
		return ErrInvalidCommunityDescription
	}
	if chat.Permissions == nil {
		return ErrInvalidCommunityDescriptionNoChatPermissions
	}
	if chat.Permissions.Access == protobuf.CommunityPermissions_UNKNOWN_ACCESS {
		return ErrInvalidCommunityDescriptionUnknownChatAccess
	}

	// An empty category ID means the chat is uncategorized, which is valid.
	if len(chat.CategoryId) != 0 {
		if _, exists := desc.Categories[chat.CategoryId]; !exists {
			return ErrInvalidCommunityDescriptionUnknownChatCategory
		}
	}

	if chat.Identity == nil {
		return ErrInvalidCommunityDescriptionChatIdentity
	}

	for pk := range chat.Members {
		// A chat with members but no community member list is inconsistent.
		if desc.Members == nil {
			return ErrInvalidCommunityDescriptionMemberInChatButNotInOrg
		}
		// Check member is in the org as well
		if _, ok := desc.Members[pk]; !ok {
			return ErrInvalidCommunityDescriptionMemberInChatButNotInOrg
		}
	}

	return nil
}
|
||||
|
||||
func validateCommunityCategory(category *protobuf.CommunityCategory) error {
|
||||
if len(category.CategoryId) == 0 {
|
||||
return ErrInvalidCommunityDescriptionCategoryNoID
|
||||
}
|
||||
|
||||
if len(category.Name) == 0 {
|
||||
return ErrInvalidCommunityDescriptionCategoryNoName
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// ValidateCommunityDescription verifies the structural invariants of a
// community description: non-nil, valid top-level permissions and tags, and
// well-formed categories and chats (see validateCommunityCategory and
// validateCommunityChat). It returns the first violation found.
func ValidateCommunityDescription(desc *protobuf.CommunityDescription) error {
	if desc == nil {
		return ErrInvalidCommunityDescription
	}
	if desc.Permissions == nil {
		return ErrInvalidCommunityDescriptionNoOrgPermissions
	}
	if desc.Permissions.Access == protobuf.CommunityPermissions_UNKNOWN_ACCESS {
		return ErrInvalidCommunityDescriptionUnknownOrgAccess
	}

	valid := requests.ValidateTags(desc.Tags)
	if !valid {
		return ErrInvalidCommunityTags
	}

	for _, category := range desc.Categories {
		if err := validateCommunityCategory(category); err != nil {
			return err
		}
	}

	for _, chat := range desc.Chats {
		if err := validateCommunityChat(desc, chat); err != nil {
			return err
		}
	}

	return nil
}
|
||||
106
vendor/github.com/status-im/status-go/protocol/communities_key_distributor.go
generated
vendored
Normal file
106
vendor/github.com/status-im/status-go/protocol/communities_key_distributor.go
generated
vendored
Normal file
@@ -0,0 +1,106 @@
|
||||
package protocol
|
||||
|
||||
import (
|
||||
"context"
|
||||
"crypto/ecdsa"
|
||||
|
||||
"github.com/status-im/status-go/protocol/common"
|
||||
"github.com/status-im/status-go/protocol/communities"
|
||||
"github.com/status-im/status-go/protocol/encryption"
|
||||
"github.com/status-im/status-go/protocol/protobuf"
|
||||
)
|
||||
|
||||
// CommunitiesKeyDistributorImpl generates and distributes hash-ratchet
// encryption keys for communities, sending key-exchange messages through the
// message sender and managing key material via the encryption protocol.
type CommunitiesKeyDistributorImpl struct {
	sender    *common.MessageSender
	encryptor *encryption.Protocol
}
|
||||
|
||||
// Generate creates new hash-ratchet keys for every key action that requires
// one (community-wide and per-channel). Only the control node may do this.
func (ckd *CommunitiesKeyDistributorImpl) Generate(community *communities.Community, keyActions *communities.EncryptionKeyActions) error {
	if !community.IsControlNode() {
		return communities.ErrNotControlNode
	}
	return iterateActions(community, keyActions, ckd.generateKey)
}

// Distribute sends key-exchange messages for every key action (community-wide
// and per-channel). Only the control node may do this.
func (ckd *CommunitiesKeyDistributorImpl) Distribute(community *communities.Community, keyActions *communities.EncryptionKeyActions) error {
	if !community.IsControlNode() {
		return communities.ErrNotControlNode
	}
	return iterateActions(community, keyActions, ckd.distributeKey)
}
|
||||
|
||||
// iterateActions applies fn first to the community-wide key action (group ID =
// community ID) and then to each channel key action (group ID = community ID
// string + channel ID), stopping at the first error.
func iterateActions(community *communities.Community, keyActions *communities.EncryptionKeyActions, fn func(community *communities.Community, hashRatchetGroupID []byte, keyAction *communities.EncryptionKeyAction) error) error {
	err := fn(community, community.ID(), &keyActions.CommunityKeyAction)
	if err != nil {
		return err
	}

	for channelID := range keyActions.ChannelKeysActions {
		// Copy the map value into a local so its address can be taken
		// (map values are not addressable).
		keyAction := keyActions.ChannelKeysActions[channelID]
		err := fn(community, []byte(community.IDString()+channelID), &keyAction)
		if err != nil {
			return err
		}
	}

	return nil
}
|
||||
|
||||
// generateKey creates a fresh hash-ratchet key for the group when the action
// asks for a new key (EncryptionKeyAdd); all other action types are no-ops.
func (ckd *CommunitiesKeyDistributorImpl) generateKey(community *communities.Community, hashRatchetGroupID []byte, keyAction *communities.EncryptionKeyAction) error {
	if keyAction.ActionType != communities.EncryptionKeyAdd {
		return nil
	}
	// The generated key is stored by the encryptor; only the error matters here.
	_, err := ckd.encryptor.GenerateHashRatchetKey(hashRatchetGroupID)
	return err
}
|
||||
|
||||
func (ckd *CommunitiesKeyDistributorImpl) distributeKey(community *communities.Community, hashRatchetGroupID []byte, keyAction *communities.EncryptionKeyAction) error {
|
||||
pubkeys := make([]*ecdsa.PublicKey, len(keyAction.Members))
|
||||
i := 0
|
||||
for hex := range keyAction.Members {
|
||||
pubkeys[i], _ = common.HexToPubkey(hex)
|
||||
i++
|
||||
}
|
||||
|
||||
switch keyAction.ActionType {
|
||||
case communities.EncryptionKeyAdd:
|
||||
// key must be already generated
|
||||
err := ckd.sendKeyExchangeMessage(community, hashRatchetGroupID, pubkeys, common.KeyExMsgReuse)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
case communities.EncryptionKeyRekey:
|
||||
err := ckd.sendKeyExchangeMessage(community, hashRatchetGroupID, pubkeys, common.KeyExMsgRekey)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
case communities.EncryptionKeySendToMembers:
|
||||
err := ckd.sendKeyExchangeMessage(community, hashRatchetGroupID, pubkeys, common.KeyExMsgReuse)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (ckd *CommunitiesKeyDistributorImpl) sendKeyExchangeMessage(community *communities.Community, hashRatchetGroupID []byte, pubkeys []*ecdsa.PublicKey, msgType common.CommKeyExMsgType) error {
|
||||
rawMessage := common.RawMessage{
|
||||
Sender: community.PrivateKey(),
|
||||
SkipEncryptionLayer: false,
|
||||
CommunityID: community.ID(),
|
||||
CommunityKeyExMsgType: msgType,
|
||||
Recipients: pubkeys,
|
||||
MessageType: protobuf.ApplicationMetadataMessage_CHAT_MESSAGE,
|
||||
HashRatchetGroupID: hashRatchetGroupID,
|
||||
PubsubTopic: community.PubsubTopic(), // TODO: confirm if it should be sent in community pubsub topic
|
||||
}
|
||||
_, err := ckd.sender.SendCommunityMessage(context.Background(), rawMessage)
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
557
vendor/github.com/status-im/status-go/protocol/contact.go
generated
vendored
Normal file
557
vendor/github.com/status-im/status-go/protocol/contact.go
generated
vendored
Normal file
@@ -0,0 +1,557 @@
|
||||
package protocol
|
||||
|
||||
import (
|
||||
"crypto/ecdsa"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
|
||||
accountJson "github.com/status-im/status-go/account/json"
|
||||
"github.com/status-im/status-go/api/multiformat"
|
||||
"github.com/status-im/status-go/eth-node/crypto"
|
||||
"github.com/status-im/status-go/eth-node/types"
|
||||
"github.com/status-im/status-go/images"
|
||||
"github.com/status-im/status-go/multiaccounts"
|
||||
"github.com/status-im/status-go/multiaccounts/accounts"
|
||||
"github.com/status-im/status-go/multiaccounts/settings"
|
||||
"github.com/status-im/status-go/protocol/common"
|
||||
"github.com/status-im/status-go/protocol/identity"
|
||||
"github.com/status-im/status-go/protocol/protobuf"
|
||||
"github.com/status-im/status-go/protocol/verification"
|
||||
)
|
||||
|
||||
// ContactRequestState is the lifecycle state of a contact request, tracked
// separately for the local and remote sides of the relationship.
type ContactRequestState int

const (
	ContactRequestStateNone ContactRequestState = iota
	ContactRequestStateMutual
	ContactRequestStateSent
	// Received is a confusing state, we should use
	// sent for both, since they are now stored in different
	// states
	ContactRequestStateReceived
	ContactRequestStateDismissed
)

// MutualStateUpdateType describes a change of the mutual contact state
// (values intentionally start at 1).
type MutualStateUpdateType int

const (
	MutualStateUpdateTypeSent MutualStateUpdateType = iota + 1
	MutualStateUpdateTypeAdded
	MutualStateUpdateTypeRemoved
)

// ContactDeviceInfo is a struct containing information about a particular device owned by a contact
type ContactDeviceInfo struct {
	// The installation id of the device
	InstallationID string `json:"id"`
	// Timestamp represents the last time we received this info
	Timestamp int64 `json:"timestamp"`
	// FCMToken is to be used for push notifications
	FCMToken string `json:"fcmToken"`
}
|
||||
|
||||
// CanonicalImage returns the best available avatar for the contact honoring
// the user's picture-visibility setting: the identicon when pictures are
// hidden (globally, or for non-added contacts under contacts-only), otherwise
// the large image, falling back to the thumbnail, and finally the identicon.
func (c *Contact) CanonicalImage(profilePicturesVisibility settings.ProfilePicturesVisibilityType) string {
	if profilePicturesVisibility == settings.ProfilePicturesVisibilityNone || (profilePicturesVisibility == settings.ProfilePicturesVisibilityContactsOnly && !c.added()) {
		return c.Identicon
	}

	if largeImage, ok := c.Images[images.LargeDimName]; ok {
		imageBase64, err := largeImage.GetDataURI()
		if err == nil {
			return imageBase64
		}
	}

	if thumbImage, ok := c.Images[images.SmallDimName]; ok {
		imageBase64, err := thumbImage.GetDataURI()
		if err == nil {
			return imageBase64
		}
	}

	// No usable image data — fall back to the generated identicon.
	return c.Identicon
}
|
||||
|
||||
// VerificationStatus is the identity-verification state of a contact.
type VerificationStatus int

const (
	VerificationStatusUNVERIFIED VerificationStatus = iota
	VerificationStatusVERIFYING
	VerificationStatusVERIFIED
)

// Contact has information about a "Contact"
type Contact struct {
	// ID of the contact. It's a hex-encoded public key (prefixed with 0x).
	ID string `json:"id"`
	// Ethereum address of the contact
	Address string `json:"address,omitempty"`
	// ENS name of contact
	EnsName string `json:"name,omitempty"`
	// EnsVerified whether we verified the name of the contact
	ENSVerified bool `json:"ensVerified"`
	// Generated username name of the contact
	Alias string `json:"alias,omitempty"`
	// Identicon generated from public key
	Identicon string `json:"identicon"`
	// LastUpdated is the last time we received an update from the contact
	// updates should be discarded if last updated is less than the one stored
	LastUpdated uint64 `json:"lastUpdated"`

	// LastUpdatedLocally is the last time we updated the contact locally
	LastUpdatedLocally uint64 `json:"lastUpdatedLocally"`

	// LocalNickname is the name the local user assigned to this contact.
	LocalNickname string `json:"localNickname,omitempty"`

	// Display name of the contact
	DisplayName string `json:"displayName"`

	// Bio - description of the contact (tell us about yourself)
	Bio string `json:"bio"`

	SocialLinks identity.SocialLinks `json:"socialLinks"`

	// Images maps image names (e.g. thumbnail/large) to identity images.
	Images map[string]images.IdentityImage `json:"images"`

	Blocked bool `json:"blocked"`

	// ContactRequestRemoteState is the state of the contact request
	// on the contact's end
	ContactRequestRemoteState ContactRequestState `json:"contactRequestRemoteState"`
	// ContactRequestRemoteClock is the clock for incoming contact requests
	ContactRequestRemoteClock uint64 `json:"contactRequestRemoteClock"`

	// ContactRequestLocalState is the state of the contact request
	// on our end
	ContactRequestLocalState ContactRequestState `json:"contactRequestLocalState"`
	// ContactRequestLocalClock is the clock for outgoing contact requests
	ContactRequestLocalClock uint64 `json:"contactRequestLocalClock"`

	IsSyncing bool
	Removed   bool

	VerificationStatus VerificationStatus       `json:"verificationStatus"`
	TrustStatus        verification.TrustStatus `json:"trustStatus"`
}
|
||||
|
||||
// IsVerified reports whether the contact's identity has been verified.
func (c Contact) IsVerified() bool {
	return c.VerificationStatus == VerificationStatusVERIFIED
}

// IsVerifying reports whether verification of the contact is in progress.
func (c Contact) IsVerifying() bool {
	return c.VerificationStatus == VerificationStatusVERIFYING
}

// IsUnverified reports whether the contact has not been verified.
func (c Contact) IsUnverified() bool {
	return c.VerificationStatus == VerificationStatusUNVERIFIED
}

// IsUntrustworthy reports whether the contact was marked untrustworthy.
func (c Contact) IsUntrustworthy() bool {
	return c.TrustStatus == verification.TrustStatusUNTRUSTWORTHY
}

// IsTrusted reports whether the contact was marked trusted.
func (c Contact) IsTrusted() bool {
	return c.TrustStatus == verification.TrustStatusTRUSTED
}
|
||||
|
||||
// PublicKey decodes the contact's hex-encoded ID into its ECDSA public key.
func (c Contact) PublicKey() (*ecdsa.PublicKey, error) {
	b, err := types.DecodeHex(c.ID)
	if err != nil {
		return nil, err
	}
	return crypto.UnmarshalPubkey(b)
}
|
||||
|
||||
// Block marks the contact as blocked and removed, dismissing any pending
// contact request at the given clock.
func (c *Contact) Block(clock uint64) {
	c.Blocked = true
	c.DismissContactRequest(clock)
	c.Removed = true
}

// BlockDesktop marks the contact as blocked without touching the contact
// request state or the removed flag (desktop-specific behavior).
func (c *Contact) BlockDesktop() {
	c.Blocked = true
}

// Unblock clears the blocked flag and resets the contact request flow by
// retracting any request at the given clock.
func (c *Contact) Unblock(clock uint64) {
	c.Blocked = false
	// Reset the contact request flow
	c.RetractContactRequest(clock)
}
|
||||
|
||||
// added reports whether we have sent a contact request to this contact.
func (c *Contact) added() bool {
	return c.ContactRequestLocalState == ContactRequestStateSent
}

// hasAddedUs reports whether the contact has sent us a contact request.
func (c *Contact) hasAddedUs() bool {
	return c.ContactRequestRemoteState == ContactRequestStateReceived
}

// mutual reports whether both sides have added each other.
func (c *Contact) mutual() bool {
	return c.added() && c.hasAddedUs()
}

// active reports whether this is a mutual, non-blocked contact.
func (c *Contact) active() bool {
	return c.mutual() && !c.Blocked
}

// dismissed reports whether we explicitly dismissed the contact request.
func (c *Contact) dismissed() bool {
	return c.ContactRequestLocalState == ContactRequestStateDismissed
}
|
||||
|
||||
func (c *Contact) names() []string {
|
||||
var names []string
|
||||
|
||||
if c.LocalNickname != "" {
|
||||
names = append(names, c.LocalNickname)
|
||||
}
|
||||
|
||||
if c.ENSVerified && len(c.EnsName) != 0 {
|
||||
names = append(names, c.EnsName)
|
||||
}
|
||||
|
||||
if c.DisplayName != "" {
|
||||
names = append(names, c.DisplayName)
|
||||
}
|
||||
|
||||
return append(names, c.Alias)
|
||||
|
||||
}
|
||||
|
||||
// PrimaryName returns the most preferred display name (see names for order).
func (c *Contact) PrimaryName() string {
	return c.names()[0]
}

// SecondaryName returns the second-best display name, but only when the user
// has set a nickname (the nickname is then the primary name).
func (c *Contact) SecondaryName() string {
	// Only shown if the user has a nickname
	if c.LocalNickname == "" {
		return ""
	}
	names := c.names()
	if len(names) > 1 {
		return names[1]
	}
	return ""
}
|
||||
|
||||
// ContactRequestProcessingResponse reports the outcome of applying a contact
// request state transition.
type ContactRequestProcessingResponse struct {
	// processed is true when the transition was applied (clock not stale).
	processed bool
	// newContactRequestReceived is true when a brand-new incoming request arrived.
	newContactRequestReceived bool
	// sendBackState is true when our state should be sent back to the peer
	// so they can catch up.
	sendBackState bool
}
|
||||
|
||||
// ContactRequestSent records that we sent a contact request at the given
// clock, ignoring stale clocks. Sending also un-removes the contact.
func (c *Contact) ContactRequestSent(clock uint64) ContactRequestProcessingResponse {
	if clock <= c.ContactRequestLocalClock {
		return ContactRequestProcessingResponse{}
	}

	c.ContactRequestLocalClock = clock
	c.ContactRequestLocalState = ContactRequestStateSent

	c.Removed = false

	return ContactRequestProcessingResponse{processed: true}
}

// AcceptContactRequest records that we accepted a contact request.
func (c *Contact) AcceptContactRequest(clock uint64) ContactRequestProcessingResponse {
	// We treat accept the same as sent, that's because accepting a contact
	// request that does not exist is possible if the instruction is coming from
	// a different device, we'd rather assume that a contact requested existed
	// and didn't reach our device than being in an inconsistent state
	return c.ContactRequestSent(clock)
}

// RetractContactRequest resets both sides of the contact request state at the
// given clock and marks the contact removed; stale clocks are ignored.
func (c *Contact) RetractContactRequest(clock uint64) ContactRequestProcessingResponse {
	if clock <= c.ContactRequestLocalClock {
		return ContactRequestProcessingResponse{}
	}

	// This is a symmetric action, we set both local & remote clock
	// since we want everything before this point discarded, regardless
	// the side it was sent from
	c.ContactRequestLocalClock = clock
	c.ContactRequestLocalState = ContactRequestStateNone
	c.ContactRequestRemoteState = ContactRequestStateNone
	c.ContactRequestRemoteClock = clock
	c.Removed = true

	return ContactRequestProcessingResponse{processed: true}
}

// DismissContactRequest records that we dismissed the incoming contact
// request; stale clocks are ignored.
func (c *Contact) DismissContactRequest(clock uint64) ContactRequestProcessingResponse {
	if clock <= c.ContactRequestLocalClock {
		return ContactRequestProcessingResponse{}
	}

	c.ContactRequestLocalClock = clock
	c.ContactRequestLocalState = ContactRequestStateDismissed

	return ContactRequestProcessingResponse{processed: true}
}
|
||||
|
||||
// Remote actions
|
||||
|
||||
// Remote actions

// contactRequestRetracted applies a retraction initiated by the contact,
// folding the result into r. Stale remote clocks are ignored.
func (c *Contact) contactRequestRetracted(clock uint64, fromSyncing bool, r ContactRequestProcessingResponse) ContactRequestProcessingResponse {
	if clock <= c.ContactRequestRemoteClock {
		return r
	}

	// This is a symmetric action, we set both local & remote clock
	// since we want everything before this point discarded, regardless
	// the side it was sent from. The only exception is when the contact
	// request has been explicitly dismissed, in which case we don't
	// change state
	if c.ContactRequestLocalState != ContactRequestStateDismissed && !fromSyncing {
		c.ContactRequestLocalClock = clock
		c.ContactRequestLocalState = ContactRequestStateNone
	}
	c.ContactRequestRemoteClock = clock
	c.ContactRequestRemoteState = ContactRequestStateNone
	r.processed = true
	return r
}
|
||||
|
||||
// ContactRequestRetracted applies a retraction from the contact (or from
// syncing) starting from a fresh processing response.
func (c *Contact) ContactRequestRetracted(clock uint64, fromSyncing bool) ContactRequestProcessingResponse {
	return c.contactRequestRetracted(clock, fromSyncing, ContactRequestProcessingResponse{})
}

// contactRequestReceived records an incoming contact request, folding the
// result into r. Stale remote clocks are ignored; a transition out of the
// None state flags a brand-new request.
func (c *Contact) contactRequestReceived(clock uint64, r ContactRequestProcessingResponse) ContactRequestProcessingResponse {
	if clock <= c.ContactRequestRemoteClock {
		return r
	}
	r.processed = true
	c.ContactRequestRemoteClock = clock
	switch c.ContactRequestRemoteState {
	case ContactRequestStateNone:
		r.newContactRequestReceived = true
	}
	c.ContactRequestRemoteState = ContactRequestStateReceived

	return r
}

// ContactRequestReceived records an incoming contact request starting from a
// fresh processing response.
func (c *Contact) ContactRequestReceived(clock uint64) ContactRequestProcessingResponse {
	return c.contactRequestReceived(clock, ContactRequestProcessingResponse{})
}

// ContactRequestAccepted records that the contact accepted our request;
// stale remote clocks are ignored.
func (c *Contact) ContactRequestAccepted(clock uint64) ContactRequestProcessingResponse {
	if clock <= c.ContactRequestRemoteClock {
		return ContactRequestProcessingResponse{}
	}
	// We treat received and accepted in the same way
	// since the intention is clear on the other side
	// and there's no difference
	return c.ContactRequestReceived(clock)
}
|
||||
|
||||
// buildContactFromPkString builds a Contact from a hex-encoded public key
// string, validating that it decodes to a well-formed ECDSA public key.
func buildContactFromPkString(pkString string) (*Contact, error) {
	publicKeyBytes, err := types.DecodeHex(pkString)
	if err != nil {
		return nil, err
	}

	publicKey, err := crypto.UnmarshalPubkey(publicKeyBytes)
	if err != nil {
		return nil, err
	}

	return buildContact(pkString, publicKey)
}

// BuildContactFromPublicKey builds a Contact from an ECDSA public key,
// deriving the hex-encoded ID from the key itself.
func BuildContactFromPublicKey(publicKey *ecdsa.PublicKey) (*Contact, error) {
	id := common.PubkeyToHex(publicKey)
	return buildContact(id, publicKey)
}
|
||||
|
||||
// getShortenedCompressedKey abbreviates a public key for display as
// "<first 3 chars>...<last 6 chars>". Keys of 9 characters or fewer cannot be
// abbreviated and yield the empty string.
func getShortenedCompressedKey(publicKey string) string {
	if len(publicKey) <= 9 {
		return ""
	}
	return fmt.Sprintf("%s...%s", publicKey[:3], publicKey[len(publicKey)-6:])
}
|
||||
|
||||
// buildContact constructs a minimal Contact for the given public key: the ID,
// an alias abbreviated from the serialized compressed key, and the derived
// Ethereum address. Profile fields are left for callers to fill in.
func buildContact(publicKeyString string, publicKey *ecdsa.PublicKey) (*Contact, error) {
	// Serialize to the compressed multiformat key used for the short alias.
	compressedKey, err := multiformat.SerializeLegacyKey(common.PubkeyToHex(publicKey))
	if err != nil {
		return nil, err
	}

	address := crypto.PubkeyToAddress(*publicKey)

	contact := &Contact{
		ID:      publicKeyString,
		Alias:   getShortenedCompressedKey(compressedKey),
		Address: types.EncodeHex(address[:]),
	}

	return contact, nil
}
|
||||
|
||||
func buildSelfContact(identity *ecdsa.PrivateKey, settings *accounts.Database, multiAccounts *multiaccounts.Database, account *multiaccounts.Account) (*Contact, error) {
|
||||
myPublicKeyString := types.EncodeHex(crypto.FromECDSAPub(&identity.PublicKey))
|
||||
|
||||
c, err := buildContact(myPublicKeyString, &identity.PublicKey)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to build contact: %w", err)
|
||||
}
|
||||
|
||||
if settings != nil {
|
||||
if s, err := settings.GetSettings(); err == nil {
|
||||
c.DisplayName = s.DisplayName
|
||||
c.Bio = s.Bio
|
||||
if s.PreferredName != nil {
|
||||
c.EnsName = *s.PreferredName
|
||||
}
|
||||
}
|
||||
if socialLinks, err := settings.GetSocialLinks(); err != nil {
|
||||
c.SocialLinks = socialLinks
|
||||
}
|
||||
}
|
||||
|
||||
if multiAccounts != nil && account != nil {
|
||||
if identityImages, err := multiAccounts.GetIdentityImages(account.KeyUID); err != nil {
|
||||
imagesMap := make(map[string]images.IdentityImage)
|
||||
for _, img := range identityImages {
|
||||
imagesMap[img.Name] = *img
|
||||
}
|
||||
|
||||
c.Images = imagesMap
|
||||
}
|
||||
}
|
||||
|
||||
return c, nil
|
||||
}
|
||||
|
||||
// contactIDFromPublicKey returns the canonical contact ID for a public key:
// the 0x-prefixed hex encoding of the uncompressed key.
func contactIDFromPublicKey(key *ecdsa.PublicKey) string {
	return types.EncodeHex(crypto.FromECDSAPub(key))
}

// contactIDFromPublicKeyString canonicalizes a hex public key string into a
// contact ID, validating that it parses as a public key first.
func contactIDFromPublicKeyString(key string) (string, error) {
	pubKey, err := common.HexToPubkey(key)
	if err != nil {
		return "", err
	}

	return contactIDFromPublicKey(pubKey), nil
}
|
||||
|
||||
// ProcessSyncContactRequestState applies contact request state received from a
// synced device, replaying the local-side transition first and the remote-side
// transition second. Each underlying transition ignores stale clocks.
func (c *Contact) ProcessSyncContactRequestState(remoteState ContactRequestState, remoteClock uint64, localState ContactRequestState, localClock uint64) {
	// We process the two separately, first local state
	switch localState {
	case ContactRequestStateDismissed:
		c.DismissContactRequest(localClock)
	case ContactRequestStateNone:
		c.RetractContactRequest(localClock)
	case ContactRequestStateSent:
		c.ContactRequestSent(localClock)
	}

	// and later remote state
	switch remoteState {
	case ContactRequestStateReceived:
		c.ContactRequestReceived(remoteClock)
	case ContactRequestStateNone:
		// fromSyncing=true: don't touch our local state on a synced retraction.
		c.ContactRequestRetracted(remoteClock, true)
	}
}
|
||||
|
||||
// MarshalJSON serializes the contact together with derived convenience fields
// (added/hasAddedUs/mutual/active, the combined contact request state, and the
// primary/secondary display names), then extends the result with public-key
// data derived from the ID.
func (c *Contact) MarshalJSON() ([]byte, error) {
	// Alias avoids recursing into this MarshalJSON when embedding Contact.
	type Alias Contact
	type ContactType struct {
		*Alias
		Added               bool                `json:"added"`
		ContactRequestState ContactRequestState `json:"contactRequestState"`
		HasAddedUs          bool                `json:"hasAddedUs"`
		Mutual              bool                `json:"mutual"`
		Active              bool                `json:"active"`
		PrimaryName         string              `json:"primaryName"`
		SecondaryName       string              `json:"secondaryName,omitempty"`
	}

	item := ContactType{
		Alias: (*Alias)(c),
	}

	item.Added = c.added()
	item.HasAddedUs = c.hasAddedUs()
	item.Mutual = c.mutual()
	item.Active = c.active()
	item.PrimaryName = c.PrimaryName()
	item.SecondaryName = c.SecondaryName()

	// Collapse the two per-side states into one for consumers; order matters
	// (mutual wins over dismissed, dismissed over sent, sent over received).
	if c.mutual() {
		item.ContactRequestState = ContactRequestStateMutual
	} else if c.dismissed() {
		item.ContactRequestState = ContactRequestStateDismissed
	} else if c.added() {
		item.ContactRequestState = ContactRequestStateSent
	} else if c.hasAddedUs() {
		item.ContactRequestState = ContactRequestStateReceived
	}
	ext, err := accountJson.ExtendStructWithPubKeyData(item.ID, item)
	if err != nil {
		return nil, err
	}

	return json.Marshal(ext)
}
|
||||
|
||||
// ContactRequestPropagatedStateReceived handles the propagation of state from
|
||||
// the other end.
|
||||
// ContactRequestPropagatedStateReceived handles the propagation of state from
// the other end.
func (c *Contact) ContactRequestPropagatedStateReceived(state *protobuf.ContactRequestPropagatedState) ContactRequestProcessingResponse {

	// It's inverted, as their local states is our remote state
	expectedLocalState := ContactRequestState(state.RemoteState)
	expectedLocalClock := state.RemoteClock

	remoteState := ContactRequestState(state.LocalState)
	remoteClock := state.LocalClock

	response := ContactRequestProcessingResponse{}

	// If we notice that the state is not consistent, and their clock is
	// outdated, we send back the state so they can catch up.
	if expectedLocalClock < c.ContactRequestLocalClock && expectedLocalState != c.ContactRequestLocalState {
		response.processed = true
		response.sendBackState = true
	}

	// If they expect our state to be more up-to-date, we only
	// trust it if the state is set to None, in this case we can trust
	// it, since a retraction can be initiated by both parties
	if expectedLocalClock > c.ContactRequestLocalClock && c.ContactRequestLocalState != ContactRequestStateDismissed && expectedLocalState == ContactRequestStateNone {
		response.processed = true
		c.ContactRequestLocalClock = expectedLocalClock
		c.ContactRequestLocalState = ContactRequestStateNone
		// We set the remote state, as this was an implicit retraction
		// potentially, for example this could happen if they
		// sent a retraction earlier, but we never received it,
		// or one of our paired devices has retracted the contact request
		// but we never synced with them.
		c.ContactRequestRemoteState = ContactRequestStateNone
	}

	// We always trust this
	if remoteClock > c.ContactRequestRemoteClock {
		if remoteState == ContactRequestStateSent {
			response = c.contactRequestReceived(remoteClock, response)
		} else if remoteState == ContactRequestStateNone {
			response = c.contactRequestRetracted(remoteClock, false, response)
		}
	}

	return response
}
|
||||
|
||||
// ContactRequestPropagatedState snapshots both sides of the contact request
// state for propagation to the other end (which will read our local state as
// its remote state and vice versa).
func (c *Contact) ContactRequestPropagatedState() *protobuf.ContactRequestPropagatedState {
	return &protobuf.ContactRequestPropagatedState{
		LocalClock:  c.ContactRequestLocalClock,
		LocalState:  uint64(c.ContactRequestLocalState),
		RemoteClock: c.ContactRequestRemoteClock,
		RemoteState: uint64(c.ContactRequestRemoteState),
	}
}
|
||||
65
vendor/github.com/status-im/status-go/protocol/datasync/datasync.go
generated
vendored
Normal file
65
vendor/github.com/status-im/status-go/protocol/datasync/datasync.go
generated
vendored
Normal file
@@ -0,0 +1,65 @@
|
||||
package datasync
|
||||
|
||||
import (
|
||||
"crypto/ecdsa"
|
||||
"errors"
|
||||
|
||||
"github.com/golang/protobuf/proto"
|
||||
datasyncnode "github.com/status-im/mvds/node"
|
||||
"github.com/status-im/mvds/protobuf"
|
||||
datasyncproto "github.com/status-im/mvds/protobuf"
|
||||
datasynctransport "github.com/status-im/mvds/transport"
|
||||
"go.uber.org/zap"
|
||||
|
||||
datasyncpeer "github.com/status-im/status-go/protocol/datasync/peer"
|
||||
)
|
||||
|
||||
// DataSync wraps an mvds node together with its transport, adding
// unwrapping/acknowledgement handling on top of the raw node API.
type DataSync struct {
	*datasyncnode.Node
	// NodeTransport is the implementation of the datasync transport interface.
	*NodeTransport
	// logger is used for debug/error reporting throughout the wrapper.
	logger *zap.Logger
	// sendingEnabled controls whether received messages are fed back into the
	// node (to track acks); when false, messages are only unwrapped.
	sendingEnabled bool
}
|
||||
|
||||
func New(node *datasyncnode.Node, transport *NodeTransport, sendingEnabled bool, logger *zap.Logger) *DataSync {
|
||||
return &DataSync{Node: node, NodeTransport: transport, sendingEnabled: sendingEnabled, logger: logger}
|
||||
}
|
||||
|
||||
// Unwrap tries to unwrap datasync message and passes back the message to datasync in order to acknowledge any potential message and mark messages as acknowledged
|
||||
func (d *DataSync) Unwrap(sender *ecdsa.PublicKey, payload []byte) (*protobuf.Payload, error) {
|
||||
logger := d.logger.With(zap.String("site", "Handle"))
|
||||
|
||||
datasyncMessage, err := unwrap(payload)
|
||||
// If it failed to decode is not a protobuf message, if it successfully decoded but body is empty, is likedly a protobuf wrapped message
|
||||
if err != nil {
|
||||
logger.Debug("Unwrapping datasync message failed", zap.Error(err))
|
||||
return nil, err
|
||||
} else if !datasyncMessage.IsValid() {
|
||||
return nil, errors.New("handling non-datasync message")
|
||||
} else {
|
||||
logger.Debug("handling datasync message")
|
||||
if d.sendingEnabled {
|
||||
d.add(sender, &datasyncMessage)
|
||||
}
|
||||
}
|
||||
|
||||
return &datasyncMessage, nil
|
||||
}
|
||||
|
||||
// Stop shuts down the underlying mvds node.
func (d *DataSync) Stop() {
	d.Node.Stop()
}
|
||||
|
||||
func (d *DataSync) add(publicKey *ecdsa.PublicKey, datasyncMessage *datasyncproto.Payload) {
|
||||
packet := datasynctransport.Packet{
|
||||
Sender: datasyncpeer.PublicKeyToPeerID(*publicKey),
|
||||
Payload: datasyncMessage,
|
||||
}
|
||||
d.NodeTransport.AddPacket(packet)
|
||||
}
|
||||
|
||||
func unwrap(payload []byte) (datasyncPayload datasyncproto.Payload, err error) {
|
||||
err = proto.Unmarshal(payload, &datasyncPayload)
|
||||
return
|
||||
}
|
||||
19
vendor/github.com/status-im/status-go/protocol/datasync/peer/utils.go
generated
vendored
Normal file
19
vendor/github.com/status-im/status-go/protocol/datasync/peer/utils.go
generated
vendored
Normal file
@@ -0,0 +1,19 @@
|
||||
package peer
|
||||
|
||||
import (
|
||||
"crypto/ecdsa"
|
||||
|
||||
"github.com/status-im/mvds/state"
|
||||
|
||||
"github.com/status-im/status-go/eth-node/crypto"
|
||||
)
|
||||
|
||||
// PublicKeyToPeerID derives an mvds PeerID from an ecdsa public key by
// copying its uncompressed encoding into the fixed-size PeerID array.
// If the encoding is longer than the array, the excess bytes are dropped
// by copy's length semantics.
func PublicKeyToPeerID(k ecdsa.PublicKey) state.PeerID {
	var p state.PeerID
	copy(p[:], crypto.FromECDSAPub(&k))
	return p
}
|
||||
|
||||
// IDToPublicKey is the inverse of PublicKeyToPeerID: it parses the PeerID
// bytes back into an ecdsa public key, returning an error if the bytes are
// not a valid uncompressed key.
func IDToPublicKey(p state.PeerID) (*ecdsa.PublicKey, error) {
	return crypto.UnmarshalPubkey(p[:])
}
|
||||
70
vendor/github.com/status-im/status-go/protocol/datasync/transport.go
generated
vendored
Normal file
70
vendor/github.com/status-im/status-go/protocol/datasync/transport.go
generated
vendored
Normal file
@@ -0,0 +1,70 @@
|
||||
package datasync
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"math"
|
||||
"math/rand"
|
||||
"time"
|
||||
|
||||
"github.com/status-im/mvds/protobuf"
|
||||
"github.com/status-im/mvds/state"
|
||||
"github.com/status-im/mvds/transport"
|
||||
"go.uber.org/zap"
|
||||
)
|
||||
|
||||
// backoffInterval is the base retransmission backoff, expressed in seconds.
const backoffInterval = 30

// errNotInitialized is returned by Send when Init has not been called yet.
var errNotInitialized = errors.New("datasync transport not initialized")

// DatasyncTicker is the epoch length used by the datasync node.
var DatasyncTicker = 300 * time.Millisecond

// It's easier to calculate nextEpoch if we consider seconds as a unit rather than
// 300 ms, so we multiply the result by the ratio
var offsetToSecond = uint64(time.Second / DatasyncTicker)

// NodeTransport implements the mvds transport interface on top of an
// in-memory packet channel plus a dispatch callback installed via Init.
type NodeTransport struct {
	// packets is an unbuffered channel: AddPacket blocks until Watch receives.
	packets  chan transport.Packet
	logger   *zap.Logger
	// dispatch sends an outgoing payload to a peer; set by Init.
	dispatch func(state.PeerID, *protobuf.Payload) error
}
|
||||
|
||||
func NewNodeTransport() *NodeTransport {
|
||||
return &NodeTransport{
|
||||
packets: make(chan transport.Packet),
|
||||
}
|
||||
}
|
||||
|
||||
// Init installs the dispatch callback used by Send and the logger.
// NOTE(review): there is no synchronization around t.dispatch; presumably
// Init is called before the transport is used concurrently — verify.
func (t *NodeTransport) Init(dispatch func(state.PeerID, *protobuf.Payload) error, logger *zap.Logger) {
	t.dispatch = dispatch
	t.logger = logger
}
|
||||
|
||||
// AddPacket enqueues an incoming packet for the datasync node.
// The channel is unbuffered, so this blocks until Watch consumes it.
func (t *NodeTransport) AddPacket(p transport.Packet) {
	t.packets <- p
}
|
||||
|
||||
// Watch blocks until the next packet is available and returns it; it is the
// consumer side of AddPacket.
func (t *NodeTransport) Watch() transport.Packet {
	return <-t.packets
}
|
||||
|
||||
func (t *NodeTransport) Send(_ state.PeerID, peer state.PeerID, payload *protobuf.Payload) error {
|
||||
if t.dispatch == nil {
|
||||
return errNotInitialized
|
||||
}
|
||||
|
||||
// We don't return an error otherwise datasync will keep
|
||||
// re-trying sending at each epoch
|
||||
err := t.dispatch(peer, payload)
|
||||
if err != nil {
|
||||
t.logger.Error("failed to send message", zap.Error(err))
|
||||
return nil
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// CalculateSendTime calculates the next epoch
|
||||
// at which a message should be sent.
|
||||
// We randomize it a bit so that not all messages are sent on the same epoch
|
||||
func CalculateSendTime(count uint64, time int64) int64 {
|
||||
return time + int64(uint64(math.Exp2(float64(count-1)))*backoffInterval*offsetToSecond) + int64(rand.Intn(30)) // nolint: gosec
|
||||
}
|
||||
30
vendor/github.com/status-im/status-go/protocol/datasync/utils.go
generated
vendored
Normal file
30
vendor/github.com/status-im/status-go/protocol/datasync/utils.go
generated
vendored
Normal file
@@ -0,0 +1,30 @@
|
||||
package datasync
|
||||
|
||||
import (
|
||||
"crypto/ecdsa"
|
||||
|
||||
"github.com/status-im/mvds/state"
|
||||
|
||||
"github.com/status-im/status-go/eth-node/crypto"
|
||||
)
|
||||
|
||||
func ToGroupID(data []byte) state.GroupID {
|
||||
g := state.GroupID{}
|
||||
copy(g[:], data[:])
|
||||
return g
|
||||
}
|
||||
|
||||
// ToOneToOneGroupID returns a groupID for a onetoonechat, which is taken by
|
||||
// concatenating the bytes of the compressed keys, in ascending order by X
|
||||
func ToOneToOneGroupID(key1 *ecdsa.PublicKey, key2 *ecdsa.PublicKey) state.GroupID {
|
||||
pk1 := crypto.CompressPubkey(key1)
|
||||
pk2 := crypto.CompressPubkey(key2)
|
||||
var groupID []byte
|
||||
if key1.X.Cmp(key2.X) == -1 {
|
||||
groupID = append(pk1, pk2...)
|
||||
} else {
|
||||
groupID = append(pk2, pk1...)
|
||||
}
|
||||
|
||||
return ToGroupID(crypto.Keccak256(groupID))
|
||||
}
|
||||
51
vendor/github.com/status-im/status-go/protocol/delete_message.go
generated
vendored
Normal file
51
vendor/github.com/status-im/status-go/protocol/delete_message.go
generated
vendored
Normal file
@@ -0,0 +1,51 @@
|
||||
package protocol
|
||||
|
||||
import (
|
||||
"crypto/ecdsa"
|
||||
|
||||
"github.com/golang/protobuf/proto"
|
||||
|
||||
"github.com/status-im/status-go/protocol/protobuf"
|
||||
)
|
||||
|
||||
// DeleteMessage represents a delete of a message from a user in the application layer, used for persistence, querying and
// signaling
type DeleteMessage struct {
	*protobuf.DeleteMessage

	// ID is the ID of the message that has been deleted
	ID string `json:"id,omitempty"`

	// From is a public key of the author of the deletion.
	From string `json:"from,omitempty"`

	// SigPubKey is the ecdsa encoded public key of the deletion author
	SigPubKey *ecdsa.PublicKey `json:"-"`
}

// NewDeleteMessage returns a DeleteMessage with an initialized (empty)
// embedded protobuf payload.
func NewDeleteMessage() *DeleteMessage {
	return &DeleteMessage{DeleteMessage: &protobuf.DeleteMessage{}}
}

// GetSigPubKey returns an ecdsa encoded public key
// this function is required to implement the ChatEntity interface
func (e *DeleteMessage) GetSigPubKey() *ecdsa.PublicKey {
	return e.SigPubKey
}

// GetProtobuf returns the struct's embedded protobuf struct
// this function is required to implement the ChatEntity interface
func (e *DeleteMessage) GetProtobuf() proto.Message {
	return e.DeleteMessage
}

// SetMessageType a setter for the MessageType field
// this function is required to implement the ChatEntity interface
func (e *DeleteMessage) SetMessageType(messageType protobuf.MessageType) {
	e.MessageType = messageType
}

// WrapGroupMessage indicates whether we should wrap this in membership information
func (e *DeleteMessage) WrapGroupMessage() bool {
	return false
}
|
||||
36
vendor/github.com/status-im/status-go/protocol/discord/assets.go
generated
vendored
Normal file
36
vendor/github.com/status-im/status-go/protocol/discord/assets.go
generated
vendored
Normal file
@@ -0,0 +1,36 @@
|
||||
package discord
|
||||
|
||||
import (
	"errors"
	"io/ioutil"
	"net/http"
	"time"

	"github.com/ethereum/go-ethereum/log"
	"github.com/status-im/status-go/images"
)
|
||||
|
||||
func DownloadAvatarAsset(url string) ([]byte, error) {
|
||||
imgs, err := images.GenerateIdentityImagesFromURL(url)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
payload := imgs[0].Payload
|
||||
return payload, nil
|
||||
}
|
||||
|
||||
// DownloadAsset performs an HTTP GET of url (with a one-minute timeout) and
// returns the response body bytes along with the response's Content-Type
// header value. The body is always closed; close failures are only logged.
func DownloadAsset(url string) ([]byte, string, error) {
	client := http.Client{Timeout: time.Minute}
	res, err := client.Get(url)
	if err != nil {
		return nil, "", err
	}
	defer func() {
		if err := res.Body.Close(); err != nil {
			log.Error("failed to close message asset http request body", "err", err)
		}
	}()

	contentType := res.Header.Get("Content-Type")
	// NOTE(review): the status code is not checked, so non-2xx bodies are
	// returned as if they were the asset — confirm callers handle this.
	bodyBytes, err := ioutil.ReadAll(res.Body)
	return bodyBytes, contentType, err
}
|
||||
275
vendor/github.com/status-im/status-go/protocol/discord/types.go
generated
vendored
Normal file
275
vendor/github.com/status-im/status-go/protocol/discord/types.go
generated
vendored
Normal file
@@ -0,0 +1,275 @@
|
||||
package discord
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"sync"
|
||||
|
||||
"github.com/status-im/status-go/images"
|
||||
"github.com/status-im/status-go/protocol/protobuf"
|
||||
)
|
||||
|
||||
// ErrorCodeType classifies import problems by severity; see the constants
// below. Comparisons like "> WarningType" rely on the declaration order.
type ErrorCodeType uint

const (
	NoErrorType ErrorCodeType = iota + 1
	WarningType
	ErrorType
)

// MaxTaskErrorItemsCount caps how many non-critical error items are stored
// per import task (critical errors are always stored).
const MaxTaskErrorItemsCount = 3

// MaxImportFileSizeBytes is the maximum accepted import file size (50 MB).
const MaxImportFileSizeBytes = 52428800

// Sentinel errors returned by the discord import pipeline.
var (
	ErrNoChannelData    = errors.New("No channels to import messages from")
	ErrNoMessageData    = errors.New("No messages to import")
	ErrMarshalMessage   = errors.New("Couldn't marshal discord message")
	ErrImportFileTooBig = fmt.Errorf("File is too big (max. %d MB)", MaxImportFileSizeBytes/1024/1024)
)
|
||||
|
||||
// MessageType mirrors the discord export's message "type" field values that
// this importer recognizes.
type MessageType string

const (
	MessageTypeDefault       MessageType = "Default"
	MessageTypeReply         MessageType = "Reply"
	MessageTypeChannelPinned MessageType = "ChannelPinnedMessage"
)
|
||||
|
||||
// ImportTask identifies one stage of the discord community import pipeline.
type ImportTask uint

const (
	CommunityCreationTask ImportTask = iota + 1
	ChannelsCreationTask
	ImportMessagesTask
	DownloadAssetsTask
	InitCommunityTask
)

// String returns the stable identifier used to report progress for this
// task; unrecognized values map to "unknown".
func (task ImportTask) String() string {
	switch task {
	case CommunityCreationTask:
		return "import.communityCreation"
	case ChannelsCreationTask:
		return "import.channelsCreation"
	case ImportMessagesTask:
		return "import.importMessages"
	case DownloadAssetsTask:
		return "import.downloadAssets"
	case InitCommunityTask:
		return "import.initializeCommunity"
	default:
		return "unknown"
	}
}
|
||||
|
||||
// ImportTaskState describes what a task is currently doing.
type ImportTaskState uint

const (
	TaskStateInitialized ImportTaskState = iota
	TaskStateSaving
)

// String returns the stable identifier reported to clients for this state;
// unrecognized values map to "import.taskState.unknown".
func (state ImportTaskState) String() string {
	switch state {
	case TaskStateInitialized:
		return "import.taskState.initialized"
	case TaskStateSaving:
		return "import.taskState.saving"
	default:
		return "import.taskState.unknown"
	}
}
|
||||
|
||||
// Channel mirrors one channel entry of a discord export file.
type Channel struct {
	ID           string `json:"id"`
	CategoryName string `json:"category"`
	CategoryID   string `json:"categoryId"`
	Name         string `json:"name"`
	Description  string `json:"topic"`
	// FilePath points at the export file this channel's messages live in.
	FilePath string `json:"filePath"`
}

// Category mirrors one category entry of a discord export file.
type Category struct {
	ID   string `json:"id"`
	Name string `json:"name"`
}

// ExportedData is the parsed content of a single channel export file.
type ExportedData struct {
	Channel      Channel                    `json:"channel"`
	Messages     []*protobuf.DiscordMessage `json:"messages"`
	MessageCount int                        `json:"messageCount"`
}

// ExtractedData aggregates parsed export files across channels, keeping
// the categories seen and overall message statistics.
type ExtractedData struct {
	Categories             map[string]*Category
	ExportedData           []*ExportedData
	OldestMessageTimestamp int
	MessageCount           int
}
|
||||
|
||||
type ImportError struct {
|
||||
// This code is used to distinguish between errors
|
||||
// that are considered "criticial" and those that are not.
|
||||
//
|
||||
// Critical errors are the ones that prevent the imported community
|
||||
// from functioning properly. For example, if the creation of the community
|
||||
// or its categories and channels fails, this is a critical error.
|
||||
//
|
||||
// Non-critical errors are the ones that would not prevent the imported
|
||||
// community from functioning. For example, if the channel data to be imported
|
||||
// has no messages, or is not parsable.
|
||||
Code ErrorCodeType `json:"code"`
|
||||
Message string `json:"message"`
|
||||
TaskInfo string `json:"taskInfo"`
|
||||
}
|
||||
|
||||
func (d ImportError) Error() string {
|
||||
return fmt.Sprintf("%d: %s", d.Code, d.Message)
|
||||
}
|
||||
|
||||
func Error(message string) *ImportError {
|
||||
return &ImportError{
|
||||
Message: message,
|
||||
Code: ErrorType,
|
||||
}
|
||||
}
|
||||
|
||||
func Warning(message string) *ImportError {
|
||||
return &ImportError{
|
||||
Message: message,
|
||||
Code: WarningType,
|
||||
}
|
||||
}
|
||||
|
||||
// ImportTaskProgress is the per-task slice of an ImportProgress report.
type ImportTaskProgress struct {
	Type          string         `json:"type"`
	Progress      float32        `json:"progress"`
	Errors        []*ImportError `json:"errors"`
	Stopped       bool           `json:"stopped"`
	ErrorsCount   uint           `json:"errorsCount"`
	WarningsCount uint           `json:"warningsCount"`
	State         string         `json:"state"`
}

// ImportTasks maps each pipeline task to its progress entry.
type ImportTasks map[ImportTask]*ImportTaskProgress

// ImportProgress is the mutable, mutex-guarded progress report for one
// discord community import, serialized to clients as JSON.
type ImportProgress struct {
	CommunityID     string                         `json:"communityId,omitempty"`
	CommunityName   string                         `json:"communityName"`
	ChannelID       string                         `json:"channelId"`
	ChannelName     string                         `json:"channelName"`
	CommunityImages map[string]images.IdentityImage `json:"communityImages"`
	Tasks           []*ImportTaskProgress          `json:"tasks"`
	Progress        float32                        `json:"progress"`
	ErrorsCount     uint                           `json:"errorsCount"`
	WarningsCount   uint                           `json:"warningsCount"`
	Stopped         bool                           `json:"stopped"`
	TotalChunkCount int                            `json:"totalChunksCount,omitempty"`
	CurrentChunk    int                            `json:"currentChunk,omitempty"`
	// m guards concurrent mutation by the task-update methods below.
	m sync.Mutex
}
|
||||
|
||||
// Init resets the progress report and creates one zeroed ImportTaskProgress
// entry per task.
// NOTE(review): Init does not take progress.m — it appears to be intended
// for use before any concurrent updates start; confirm with callers.
func (progress *ImportProgress) Init(totalChunkCount int, tasks []ImportTask) {
	progress.Progress = 0
	progress.Tasks = make([]*ImportTaskProgress, 0)
	for _, task := range tasks {
		progress.Tasks = append(progress.Tasks, &ImportTaskProgress{
			Type:          task.String(),
			Progress:      0,
			Errors:        []*ImportError{},
			Stopped:       false,
			ErrorsCount:   0,
			WarningsCount: 0,
			State:         TaskStateInitialized.String(),
		})
	}
	progress.ErrorsCount = 0
	progress.WarningsCount = 0
	progress.Stopped = false
	progress.TotalChunkCount = totalChunkCount
	progress.CurrentChunk = 0
}
|
||||
|
||||
// Stop marks the whole import as stopped.
// NOTE(review): the flag is written without taking progress.m — verify this
// unsynchronized write is acceptable to all readers.
func (progress *ImportProgress) Stop() {
	progress.Stopped = true
}
|
||||
|
||||
// AddTaskError records an error or warning against the matching task entry
// and updates both per-task and aggregate counters.
func (progress *ImportProgress) AddTaskError(task ImportTask, err *ImportError) {
	progress.m.Lock()
	defer progress.m.Unlock()

	for i, t := range progress.Tasks {
		if t.Type == task.String() {
			// Cap stored items per task, but always keep critical errors.
			errorsAndWarningsCount := progress.Tasks[i].ErrorsCount + progress.Tasks[i].WarningsCount
			if (errorsAndWarningsCount < MaxTaskErrorItemsCount) || err.Code > WarningType {
				errors := progress.Tasks[i].Errors
				progress.Tasks[i].Errors = append(errors, err)
			}
			if err.Code > WarningType {
				progress.Tasks[i].ErrorsCount++
			}
			// NOTE(review): this condition also holds for critical errors
			// (ErrorType > NoErrorType), so an error increments the task's
			// WarningsCount as well as its ErrorsCount. The aggregate
			// counters below avoid that via the early return — confirm the
			// per-task double count is intended.
			if err.Code > NoErrorType {
				progress.Tasks[i].WarningsCount++
			}
		}
	}
	// Aggregate counters: each entry counts as either an error or a warning.
	if err.Code > WarningType {
		progress.ErrorsCount++
		return
	}
	if err.Code > NoErrorType {
		progress.WarningsCount++
	}
}
|
||||
|
||||
// StopTask marks the matching task entry as stopped and then stops the
// whole import (Stop is called while progress.m is still held).
func (progress *ImportProgress) StopTask(task ImportTask) {
	progress.m.Lock()
	defer progress.m.Unlock()
	for i, t := range progress.Tasks {
		if t.Type == task.String() {
			progress.Tasks[i].Stopped = true
		}
	}
	progress.Stop()
}
|
||||
|
||||
// UpdateTaskProgress sets the matching task's progress to value and
// recomputes the overall progress as the mean of all task progresses.
// NOTE(review): if Tasks is empty the division yields NaN — presumably
// Init is always called first; verify.
func (progress *ImportProgress) UpdateTaskProgress(task ImportTask, value float32) {
	progress.m.Lock()
	defer progress.m.Unlock()
	for i, t := range progress.Tasks {
		if t.Type == task.String() {
			progress.Tasks[i].Progress = value
		}
	}
	sum := float32(0)
	for _, t := range progress.Tasks {
		sum = sum + t.Progress
	}
	// Update total progress now that sub progress has changed
	progress.Progress = sum / float32(len(progress.Tasks))
}
|
||||
|
||||
// UpdateTaskState sets the matching task's state string.
func (progress *ImportProgress) UpdateTaskState(task ImportTask, state ImportTaskState) {
	progress.m.Lock()
	defer progress.m.Unlock()
	for i, t := range progress.Tasks {
		if t.Type == task.String() {
			progress.Tasks[i].State = state.String()
		}
	}
}
|
||||
|
||||
// AssetCounter is a goroutine-safe, monotonically increasing counter used
// to track downloaded assets.
type AssetCounter struct {
	m sync.RWMutex
	v uint64
}

// Value returns the current count under a read lock.
func (c *AssetCounter) Value() uint64 {
	c.m.RLock()
	defer c.m.RUnlock()
	return c.v
}

// Increase increments the count by one under a write lock.
func (c *AssetCounter) Increase() {
	c.m.Lock()
	defer c.m.Unlock()
	c.v++
}
|
||||
54
vendor/github.com/status-im/status-go/protocol/edit_message.go
generated
vendored
Normal file
54
vendor/github.com/status-im/status-go/protocol/edit_message.go
generated
vendored
Normal file
@@ -0,0 +1,54 @@
|
||||
package protocol
|
||||
|
||||
import (
|
||||
"crypto/ecdsa"
|
||||
|
||||
"github.com/golang/protobuf/proto"
|
||||
|
||||
"github.com/status-im/status-go/protocol/protobuf"
|
||||
)
|
||||
|
||||
// EditMessage represents an edit of a message from a user in the application layer, used for persistence, querying and
// signaling
type EditMessage struct {
	*protobuf.EditMessage

	// ID is the ID of the message that has been edited
	ID string `json:"id,omitempty"`

	// From is a public key of the author of the edit reaction.
	From string `json:"from,omitempty"`

	// SigPubKey is the ecdsa encoded public key of the edit author
	SigPubKey *ecdsa.PublicKey `json:"-"`

	// LocalChatID is the chatID of the local chat (one-to-one are not symmetric)
	LocalChatID string `json:"localChatId"`
}

// NewEditMessage returns an EditMessage with an initialized (empty)
// embedded protobuf payload.
func NewEditMessage() *EditMessage {
	return &EditMessage{EditMessage: &protobuf.EditMessage{}}
}

// GetSigPubKey returns an ecdsa encoded public key
// this function is required to implement the ChatEntity interface
func (e *EditMessage) GetSigPubKey() *ecdsa.PublicKey {
	return e.SigPubKey
}

// GetProtobuf returns the struct's embedded protobuf struct
// this function is required to implement the ChatEntity interface
func (e *EditMessage) GetProtobuf() proto.Message {
	return e.EditMessage
}

// SetMessageType a setter for the MessageType field
// this function is required to implement the ChatEntity interface
func (e *EditMessage) SetMessageType(messageType protobuf.MessageType) {
	e.MessageType = messageType
}

// WrapGroupMessage indicates whether we should wrap this in membership information
func (e *EditMessage) WrapGroupMessage() bool {
	return false
}
|
||||
93
vendor/github.com/status-im/status-go/protocol/emoji_reaction.go
generated
vendored
Normal file
93
vendor/github.com/status-im/status-go/protocol/emoji_reaction.go
generated
vendored
Normal file
@@ -0,0 +1,93 @@
|
||||
package protocol
|
||||
|
||||
import (
|
||||
"crypto/ecdsa"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
|
||||
"github.com/golang/protobuf/proto"
|
||||
|
||||
accountJson "github.com/status-im/status-go/account/json"
|
||||
"github.com/status-im/status-go/eth-node/crypto"
|
||||
"github.com/status-im/status-go/eth-node/types"
|
||||
"github.com/status-im/status-go/protocol/protobuf"
|
||||
)
|
||||
|
||||
// EmojiReaction represents an emoji reaction from a user in the application layer, used for persistence, querying and
// signaling
type EmojiReaction struct {
	*protobuf.EmojiReaction

	// From is a public key of the author of the emoji reaction.
	From string `json:"from,omitempty"`

	// SigPubKey is the ecdsa encoded public key of the emoji reaction author
	SigPubKey *ecdsa.PublicKey `json:"-"`

	// LocalChatID is the chatID of the local chat (one-to-one are not symmetric)
	LocalChatID string `json:"localChatId"`
}

// NewEmojiReaction returns an EmojiReaction with an initialized (empty)
// embedded protobuf payload.
func NewEmojiReaction() *EmojiReaction {
	return &EmojiReaction{EmojiReaction: &protobuf.EmojiReaction{}}
}

// ID is the Keccak256() concatenation of From-MessageID-EmojiType
func (e *EmojiReaction) ID() string {
	return types.EncodeHex(crypto.Keccak256([]byte(fmt.Sprintf("%s%s%d", e.From, e.MessageId, e.Type))))
}

// GetSigPubKey returns an ecdsa encoded public key
// this function is required to implement the ChatEntity interface
func (e *EmojiReaction) GetSigPubKey() *ecdsa.PublicKey {
	return e.SigPubKey
}

// GetProtobuf returns the struct's embedded protobuf struct
// this function is required to implement the ChatEntity interface
func (e *EmojiReaction) GetProtobuf() proto.Message {
	return e.EmojiReaction
}

// SetMessageType a setter for the MessageType field
// this function is required to implement the ChatEntity interface
func (e *EmojiReaction) SetMessageType(messageType protobuf.MessageType) {
	e.MessageType = messageType
}

// MarshalJSON flattens the reaction into a client-facing JSON shape (with a
// derived ID) and extends it with public-key-derived fields via
// accountJson.ExtendStructWithPubKeyData.
func (e *EmojiReaction) MarshalJSON() ([]byte, error) {
	item := struct {
		ID          string                      `json:"id"`
		Clock       uint64                      `json:"clock,omitempty"`
		ChatID      string                      `json:"chatId,omitempty"`
		LocalChatID string                      `json:"localChatId,omitempty"`
		From        string                      `json:"from"`
		MessageID   string                      `json:"messageId,omitempty"`
		MessageType protobuf.MessageType        `json:"messageType,omitempty"`
		Retracted   bool                        `json:"retracted,omitempty"`
		EmojiID     protobuf.EmojiReaction_Type `json:"emojiId,omitempty"`
	}{

		ID:          e.ID(),
		Clock:       e.Clock,
		ChatID:      e.ChatId,
		LocalChatID: e.LocalChatID,
		From:        e.From,
		MessageID:   e.MessageId,
		MessageType: e.MessageType,
		Retracted:   e.Retracted,
		EmojiID:     e.Type,
	}

	ext, err := accountJson.ExtendStructWithPubKeyData(item.From, item)
	if err != nil {
		return nil, err
	}

	return json.Marshal(ext)
}

// WrapGroupMessage indicates whether we should wrap this in membership information
func (e *EmojiReaction) WrapGroupMessage() bool {
	return false
}
|
||||
12
vendor/github.com/status-im/status-go/protocol/encryption/README.md
generated
vendored
Normal file
12
vendor/github.com/status-im/status-go/protocol/encryption/README.md
generated
vendored
Normal file
@@ -0,0 +1,12 @@
|
||||
# protocol/encryption package
|
||||
|
||||
## Hash ratchet encryption
|
||||
`encryptor.GenerateHashRatchetKey()` generates a hash ratchet key and stores it in the DB.
|
||||
There, 2 new tables are created: `hash_ratchet_encryption` and `hash_ratchet_encryption_cache`.
|
||||
Each hash ratchet key is uniquely identified by the `(groupId, keyId)` pair, where `keyId` is derived from a clock value.
|
||||
|
||||
`protocol.BuildHashRatchetKeyExchangeMessage` builds a 1-on-1 message containing the hash ratchet key, given its ID.
|
||||
|
||||
`protocol.BuildHashRatchetMessage` builds a hash ratchet message with arbitrary payload, given `groupId`. It will use the latest hash ratchet key available. `encryptor.encryptWithHR` encrypts the payload using Hash Ratchet algorithms. Intermediate hashes are stored in `hash_ratchet_encryption_cache` table.
|
||||
|
||||
`protocol.HandleMessage` uses `encryptor.decryptWithHR` fn for decryption.
|
||||
763
vendor/github.com/status-im/status-go/protocol/encryption/encryptor.go
generated
vendored
Normal file
763
vendor/github.com/status-im/status-go/protocol/encryption/encryptor.go
generated
vendored
Normal file
@@ -0,0 +1,763 @@
|
||||
package encryption
|
||||
|
||||
import (
|
||||
"crypto/ecdsa"
|
||||
"database/sql"
|
||||
"encoding/hex"
|
||||
"errors"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
dr "github.com/status-im/doubleratchet"
|
||||
"go.uber.org/zap"
|
||||
|
||||
"github.com/status-im/status-go/eth-node/crypto"
|
||||
"github.com/status-im/status-go/eth-node/crypto/ecies"
|
||||
"github.com/status-im/status-go/eth-node/types"
|
||||
|
||||
"github.com/status-im/status-go/protocol/encryption/multidevice"
|
||||
)
|
||||
|
||||
// Sentinel errors returned by the encryption layer.
var (
	errSessionNotFound = errors.New("session not found")
	ErrDeviceNotFound  = errors.New("device not found")
	// ErrNotPairedDevice means that we received a message signed with our public key
	// but from a device that has not been paired.
	// This should not happen because the protocol forbids sending a message to
	// non-paired devices, however, in theory it is possible to receive such a message.
	ErrNotPairedDevice            = errors.New("received a message from not paired device")
	ErrHashRatchetSeqNoTooHigh    = errors.New("Hash ratchet seq no is too high")
	ErrHashRatchetGroupIDNotFound = errors.New("Hash ratchet group id not found")
	ErrNoEncryptionKey            = errors.New("no encryption key found for the community")
)

// If we have no bundles, we use a constant so that the message can reach any device.
const (
	noInstallationID         = "none"
	maxHashRatchetSeqNoDelta = 100000
)

// confirmationData pairs a double-ratchet message header with its ratchet
// info so the corresponding message key can be deleted once the message is
// confirmed as processed.
type confirmationData struct {
	header *dr.MessageHeader
	drInfo *RatchetInfo
}

// encryptor defines a service that is responsible for the encryption aspect of the protocol.
type encryptor struct {
	persistence *sqlitePersistence
	config      encryptorConfig
	// messageIDs maps hex-encoded message IDs to pending confirmation data;
	// guarded by mutex.
	messageIDs map[string]*confirmationData
	mutex      sync.Mutex
	logger     *zap.Logger
}

// encryptorConfig carries the tunables of the encryption service; see
// defaultEncryptorConfig for the default values.
type encryptorConfig struct {
	InstallationID string
	// Max number of installations we keep synchronized.
	MaxInstallations int
	// How many consecutive messages can be skipped in the receiving chain.
	MaxSkip int
	// Any message with seqNo <= currentSeq - maxKeep will be deleted.
	MaxKeep int
	// How many keys do we store in total per session.
	MaxMessageKeysPerSession int
	// How long before we refresh the interval in milliseconds
	BundleRefreshInterval int64
	// The logging object
	Logger *zap.Logger
}
|
||||
|
||||
// defaultEncryptorConfig returns the default values used by the encryption service
|
||||
func defaultEncryptorConfig(installationID string, logger *zap.Logger) encryptorConfig {
|
||||
if logger == nil {
|
||||
logger = zap.NewNop()
|
||||
}
|
||||
|
||||
return encryptorConfig{
|
||||
MaxInstallations: 3,
|
||||
MaxSkip: 1000,
|
||||
MaxKeep: 3000,
|
||||
MaxMessageKeysPerSession: 2000,
|
||||
BundleRefreshInterval: 24 * 60 * 60 * 1000,
|
||||
InstallationID: installationID,
|
||||
Logger: logger,
|
||||
}
|
||||
}
|
||||
|
||||
// newEncryptor creates a new EncryptionService instance backed by the given
// database and configuration; the logger is namespaced as "encryptor".
func newEncryptor(db *sql.DB, config encryptorConfig) *encryptor {
	return &encryptor{
		persistence: newSQLitePersistence(db),
		config:      config,
		messageIDs:  make(map[string]*confirmationData),
		logger:      config.Logger.With(zap.Namespace("encryptor")),
	}
}
|
||||
|
||||
func (s *encryptor) keyFromActiveX3DH(theirIdentityKey []byte, theirSignedPreKey []byte, myIdentityKey *ecdsa.PrivateKey) ([]byte, *ecdsa.PublicKey, error) {
|
||||
sharedKey, ephemeralPubKey, err := PerformActiveX3DH(theirIdentityKey, theirSignedPreKey, myIdentityKey)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
return sharedKey, ephemeralPubKey, nil
|
||||
}
|
||||
|
||||
// getDRSession loads the double-ratchet session with the given ID from
// persistent storage, configured with the encryptor's skip/keep/key limits
// and the Ethereum crypto primitives.
func (s *encryptor) getDRSession(id []byte) (dr.Session, error) {
	sessionStorage := s.persistence.SessionStorage()
	return dr.Load(
		id,
		sessionStorage,
		dr.WithKeysStorage(s.persistence.KeysStorage()),
		dr.WithMaxSkip(s.config.MaxSkip),
		dr.WithMaxKeep(s.config.MaxKeep),
		dr.WithMaxMessageKeysPerSession(s.config.MaxMessageKeysPerSession),
		dr.WithCrypto(crypto.EthereumCrypto{}),
	)
}
|
||||
|
||||
// confirmationIDString renders a raw message ID as a lowercase hex string,
// used as the key of the in-memory confirmation map.
func confirmationIDString(messageID []byte) string {
	return hex.EncodeToString(messageID)
}
|
||||
|
||||
// ConfirmMessageProcessed confirms a processed message and deletes the
// double-ratchet message key associated with it. A missing entry is not an
// error: it means no key material is stored (public message, or already
// confirmed).
func (s *encryptor) ConfirmMessageProcessed(messageID []byte) error {
	s.mutex.Lock()
	defer s.mutex.Unlock()

	id := confirmationIDString(messageID)
	confirmationData, ok := s.messageIDs[id]
	if !ok {
		s.logger.Debug("could not confirm message or message already confirmed", zap.String("messageID", id))
		// We are ok with this, means no key material is stored (public message, or already confirmed)
		return nil
	}

	// Load session from store first
	session, err := s.getDRSession(confirmationData.drInfo.ID)
	if err != nil {
		return err
	}

	// Delete the message key so it cannot be reused.
	if err := session.DeleteMk(confirmationData.header.DH, confirmationData.header.N); err != nil {
		return err
	}

	// Clean up
	delete(s.messageIDs, id)

	return nil
}
|
||||
|
||||
// CreateBundle retrieves or creates an X3DH bundle given a private key.
// If a stored bundle exists and is fresh it is re-signed and returned; if
// it has expired it is marked expired and a new one is created; if none
// exists one is created and persisted, then the function recurses once to
// return it through the normal retrieval path.
func (s *encryptor) CreateBundle(privateKey *ecdsa.PrivateKey, installations []*multidevice.Installation) (*Bundle, error) {
	ourIdentityKeyC := crypto.CompressPubkey(&privateKey.PublicKey)

	bundleContainer, err := s.persistence.GetAnyPrivateBundle(ourIdentityKeyC, installations)
	if err != nil {
		return nil, err
	}

	// A bundle is expired once it is older than BundleRefreshInterval
	// (milliseconds), compared against its creation timestamp in nanoseconds.
	expired := bundleContainer != nil && bundleContainer.GetBundle().Timestamp < time.Now().Add(-1*time.Duration(s.config.BundleRefreshInterval)*time.Millisecond).UnixNano()

	// If the bundle has expired we create a new one
	if expired {
		// Mark sessions has expired
		if err := s.persistence.MarkBundleExpired(bundleContainer.GetBundle().GetIdentity()); err != nil {
			return nil, err
		}

	} else if bundleContainer != nil {
		// Fresh bundle: re-sign and return it as-is.
		err = SignBundle(privateKey, bundleContainer)
		if err != nil {
			return nil, err
		}
		return bundleContainer.GetBundle(), nil
	}

	// needs transaction/mutex to avoid creating multiple bundles
	// although not a problem
	bundleContainer, err = NewBundleContainer(privateKey, s.config.InstallationID)
	if err != nil {
		return nil, err
	}

	if err = s.persistence.AddPrivateBundle(bundleContainer); err != nil {
		return nil, err
	}

	// Recurse once: the freshly persisted bundle is now retrievable above.
	return s.CreateBundle(privateKey, installations)
}
|
||||
|
||||
// DecryptWithDH decrypts message sent with a DH key exchange, and throws away the key after decryption
|
||||
func (s *encryptor) DecryptWithDH(myIdentityKey *ecdsa.PrivateKey, theirEphemeralKey *ecdsa.PublicKey, payload []byte) ([]byte, error) {
|
||||
key, err := PerformDH(
|
||||
ecies.ImportECDSA(myIdentityKey),
|
||||
ecies.ImportECDSAPublic(theirEphemeralKey),
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return crypto.DecryptSymmetric(key, payload)
|
||||
|
||||
}
|
||||
|
||||
// keyFromPassiveX3DH derives the shared key for a message sent with an X3DH
// key exchange, acting as the passive party: it recovers the private signed
// pre-key of the bundle the sender used (ourBundleID) and combines it with
// the sender's identity and ephemeral keys.
func (s *encryptor) keyFromPassiveX3DH(myIdentityKey *ecdsa.PrivateKey, theirIdentityKey *ecdsa.PublicKey, theirEphemeralKey *ecdsa.PublicKey, ourBundleID []byte) ([]byte, error) {
	bundlePrivateKey, err := s.persistence.GetPrivateKeyBundle(ourBundleID)
	if err != nil {
		s.logger.Error("could not get private bundle", zap.Error(err))
		return nil, err
	}

	// No stored private key for that bundle ID: we cannot derive a session.
	if bundlePrivateKey == nil {
		return nil, errSessionNotFound
	}

	signedPreKey, err := crypto.ToECDSA(bundlePrivateKey)
	if err != nil {
		s.logger.Error("could not convert to ecdsa", zap.Error(err))
		return nil, err
	}

	key, err := PerformPassiveX3DH(
		theirIdentityKey,
		signedPreKey,
		theirEphemeralKey,
		myIdentityKey,
	)
	if err != nil {
		s.logger.Error("could not perform passive x3dh", zap.Error(err))
		return nil, err
	}
	return key, nil
}
|
||||
|
||||
// ProcessPublicBundle persists a bundle received from another user.
// The identity key parameter is not used here; the bundle is simply stored.
func (s *encryptor) ProcessPublicBundle(myIdentityKey *ecdsa.PrivateKey, b *Bundle) error {
	return s.persistence.AddPublicBundle(b)
}
|
||||
|
||||
func (s *encryptor) GetMessage(msgs map[string]*EncryptedMessageProtocol) *EncryptedMessageProtocol {
|
||||
msg := msgs[s.config.InstallationID]
|
||||
if msg == nil {
|
||||
msg = msgs[noInstallationID]
|
||||
}
|
||||
return msg
|
||||
}
|
||||
|
||||
// DecryptPayload decrypts the payload of a EncryptedMessageProtocol, given an
// identity private key and the sender's public key. It dispatches on which
// header is present: an X3DH header first bootstraps a session, then the
// Double Ratchet, plain DH, and Hash Ratchet headers are tried in turn.
func (s *encryptor) DecryptPayload(myIdentityKey *ecdsa.PrivateKey, theirIdentityKey *ecdsa.PublicKey, theirInstallationID string, msgs map[string]*EncryptedMessageProtocol, messageID []byte) ([]byte, error) {
	s.mutex.Lock()
	defer s.mutex.Unlock()

	msg := s.GetMessage(msgs)

	// We should not be sending a signal if it's coming from us, as we receive our own messages
	if msg == nil && !samePublicKeys(*theirIdentityKey, myIdentityKey.PublicKey) {
		s.logger.Debug("message is coming from someone else, but not targeting our installation id")
		return nil, ErrDeviceNotFound
	} else if msg == nil && theirInstallationID != s.config.InstallationID {
		s.logger.Debug("message is coming from same public key, but different installation id")
		return nil, ErrNotPairedDevice
	} else if msg == nil && theirInstallationID == s.config.InstallationID {
		s.logger.Debug("message is coming from us and is nil")
		return nil, nil
	}

	payload := msg.GetPayload()

	// X3DH header: the sender bootstrapped a session against one of our
	// bundles. Derive the shared secret and store it as ratchet info so the
	// DR branch below can find it.
	if x3dhHeader := msg.GetX3DHHeader(); x3dhHeader != nil {
		bundleID := x3dhHeader.GetId()
		theirEphemeralKey, err := crypto.DecompressPubkey(x3dhHeader.GetKey())

		if err != nil {
			return nil, err
		}

		symmetricKey, err := s.keyFromPassiveX3DH(myIdentityKey, theirIdentityKey, theirEphemeralKey, bundleID)
		if err != nil {
			return nil, err
		}

		theirIdentityKeyC := crypto.CompressPubkey(theirIdentityKey)
		err = s.persistence.AddRatchetInfo(symmetricKey, theirIdentityKeyC, bundleID, nil, theirInstallationID)
		if err != nil {
			return nil, err
		}
	}

	// Double Ratchet header: decrypt with the stored session. The message
	// key is retained (via s.messageIDs) until ConfirmMessageProcessed
	// deletes it.
	if drHeader := msg.GetDRHeader(); drHeader != nil {
		drMessage := &dr.Message{
			Header: dr.MessageHeader{
				N:  drHeader.GetN(),
				PN: drHeader.GetPn(),
				DH: drHeader.GetKey(),
			},
			Ciphertext: msg.GetPayload(),
		}

		theirIdentityKeyC := crypto.CompressPubkey(theirIdentityKey)

		drInfo, err := s.persistence.GetRatchetInfo(drHeader.GetId(), theirIdentityKeyC, theirInstallationID)
		if err != nil {
			s.logger.Error("could not get ratchet info", zap.Error(err))
			return nil, err
		}

		// We mark the exchange as successful so we stop sending x3dh header
		if err = s.persistence.RatchetInfoConfirmed(drHeader.GetId(), theirIdentityKeyC, theirInstallationID); err != nil {
			s.logger.Error("could not confirm ratchet info", zap.Error(err))
			return nil, err
		}

		if drInfo == nil {
			s.logger.Error("could not find a session")
			return nil, errSessionNotFound
		}

		// Remember the header and session so ConfirmMessageProcessed can
		// later delete the message key for this messageID.
		confirmationData := &confirmationData{
			header: &drMessage.Header,
			drInfo: drInfo,
		}
		s.messageIDs[confirmationIDString(messageID)] = confirmationData

		return s.decryptUsingDR(theirIdentityKey, drInfo, drMessage)
	}

	// Try DH
	if header := msg.GetDHHeader(); header != nil {
		decompressedKey, err := crypto.DecompressPubkey(header.GetKey())
		if err != nil {
			return nil, err
		}
		return s.DecryptWithDH(myIdentityKey, decompressedKey, payload)
	}

	// Try Hash Ratchet
	if header := msg.GetHRHeader(); header != nil {

		ratchet := &HashRatchetKeyCompatibility{
			GroupID: header.GroupId,
			// NOTE: this would be nil in the old format
			keyID: header.KeyId,
		}

		// Old key format
		if header.DeprecatedKeyId != 0 {
			ratchet.Timestamp = uint64(header.DeprecatedKeyId)
		}

		decryptedPayload, err := s.DecryptWithHR(ratchet, header.SeqNo, payload)

		return decryptedPayload, err
	}
	return nil, errors.New("no key specified")
}
|
||||
|
||||
func (s *encryptor) createNewSession(drInfo *RatchetInfo, sk []byte, keyPair crypto.DHPair) (dr.Session, error) {
|
||||
var err error
|
||||
var session dr.Session
|
||||
|
||||
if drInfo.PrivateKey != nil {
|
||||
session, err = dr.New(
|
||||
drInfo.ID,
|
||||
sk,
|
||||
keyPair,
|
||||
s.persistence.SessionStorage(),
|
||||
dr.WithKeysStorage(s.persistence.KeysStorage()),
|
||||
dr.WithMaxSkip(s.config.MaxSkip),
|
||||
dr.WithMaxKeep(s.config.MaxKeep),
|
||||
dr.WithMaxMessageKeysPerSession(s.config.MaxMessageKeysPerSession),
|
||||
dr.WithCrypto(crypto.EthereumCrypto{}))
|
||||
} else {
|
||||
session, err = dr.NewWithRemoteKey(
|
||||
drInfo.ID,
|
||||
sk,
|
||||
keyPair.PubKey,
|
||||
s.persistence.SessionStorage(),
|
||||
dr.WithKeysStorage(s.persistence.KeysStorage()),
|
||||
dr.WithMaxSkip(s.config.MaxSkip),
|
||||
dr.WithMaxKeep(s.config.MaxKeep),
|
||||
dr.WithMaxMessageKeysPerSession(s.config.MaxMessageKeysPerSession),
|
||||
dr.WithCrypto(crypto.EthereumCrypto{}))
|
||||
}
|
||||
|
||||
return session, err
|
||||
}
|
||||
|
||||
// encryptUsingDR encrypts payload with the Double Ratchet session identified
// by drInfo, creating and persisting the session on first use. It returns
// the ciphertext plus the DR header the recipient needs to decrypt it.
func (s *encryptor) encryptUsingDR(theirIdentityKey *ecdsa.PublicKey, drInfo *RatchetInfo, payload []byte) ([]byte, *DRHeader, error) {
	var err error

	var session dr.Session

	keyPair := crypto.DHPair{
		PrvKey: drInfo.PrivateKey,
		PubKey: drInfo.PublicKey,
	}

	// Load session from store first
	session, err = s.getDRSession(drInfo.ID)

	if err != nil {
		return nil, nil, err
	}

	// Create a new one
	if session == nil {
		session, err = s.createNewSession(drInfo, drInfo.Sk, keyPair)
		if err != nil {
			return nil, nil, err
		}
	}

	response, err := session.RatchetEncrypt(payload, nil)
	if err != nil {
		return nil, nil, err
	}

	// The header carries the bundle ID, the current ratchet public key and
	// the message counters the recipient uses to advance its chain.
	header := &DRHeader{
		Id:  drInfo.BundleID,
		Key: response.Header.DH[:],
		N:   response.Header.N,
		Pn:  response.Header.PN,
	}

	return response.Ciphertext, header, nil
}
|
||||
|
||||
// decryptUsingDR decrypts a Double Ratchet message with the session
// identified by drInfo, creating and persisting the session on first use.
func (s *encryptor) decryptUsingDR(theirIdentityKey *ecdsa.PublicKey, drInfo *RatchetInfo, payload *dr.Message) ([]byte, error) {
	var err error

	var session dr.Session

	keyPair := crypto.DHPair{
		PrvKey: drInfo.PrivateKey,
		PubKey: drInfo.PublicKey,
	}

	// Load the session from the store, or create it lazily below.
	session, err = s.getDRSession(drInfo.ID)
	if err != nil {
		return nil, err
	}

	if session == nil {
		session, err = s.createNewSession(drInfo, drInfo.Sk, keyPair)
		if err != nil {
			return nil, err
		}
	}

	plaintext, err := session.RatchetDecrypt(*payload, nil)
	if err != nil {
		return nil, err
	}

	return plaintext, nil
}
|
||||
|
||||
// encryptWithDH encrypts payload for theirIdentityKey with a one-off
// (active) DH exchange: a fresh ephemeral key is generated, the derived
// symmetric key encrypts the payload, and the compressed ephemeral public
// key travels in the DH header so the recipient can re-derive the key.
func (s *encryptor) encryptWithDH(theirIdentityKey *ecdsa.PublicKey, payload []byte) (*EncryptedMessageProtocol, error) {
	symmetricKey, ourEphemeralKey, err := PerformActiveDH(theirIdentityKey)
	if err != nil {
		return nil, err
	}

	encryptedPayload, err := crypto.EncryptSymmetric(symmetricKey, payload)
	if err != nil {
		return nil, err
	}

	return &EncryptedMessageProtocol{
		DHHeader: &DHHeader{
			Key: crypto.CompressPubkey(ourEphemeralKey),
		},
		Payload: encryptedPayload,
	}, nil
}
|
||||
|
||||
func (s *encryptor) EncryptPayloadWithDH(theirIdentityKey *ecdsa.PublicKey, payload []byte) (map[string]*EncryptedMessageProtocol, error) {
|
||||
response := make(map[string]*EncryptedMessageProtocol)
|
||||
dmp, err := s.encryptWithDH(theirIdentityKey, payload)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
response[noInstallationID] = dmp
|
||||
return response, nil
|
||||
}
|
||||
|
||||
// GetPublicBundle returns the active installations bundles for a given user.
// It is a thin pass-through to the persistence layer.
func (s *encryptor) GetPublicBundle(theirIdentityKey *ecdsa.PublicKey, installations []*multidevice.Installation) (*Bundle, error) {
	return s.persistence.GetPublicBundle(theirIdentityKey, installations)
}
|
||||
|
||||
// EncryptPayload returns a new EncryptedMessageProtocol with a given payload
// encrypted, given a recipient's public key and the sender private identity
// key. With no known installations it falls back to DH encryption addressed
// to all devices; otherwise it encrypts per installation, preferring an
// existing DR session and bootstrapping one via active X3DH when the
// recipient's bundle is available. Returns the per-installation messages and
// the list of installations actually targeted.
func (s *encryptor) EncryptPayload(theirIdentityKey *ecdsa.PublicKey, myIdentityKey *ecdsa.PrivateKey, installations []*multidevice.Installation, payload []byte) (map[string]*EncryptedMessageProtocol, []*multidevice.Installation, error) {
	logger := s.logger.With(
		zap.String("site", "EncryptPayload"),
		zap.String("their-identity-key", types.EncodeHex(crypto.FromECDSAPub(theirIdentityKey))))

	// Which installations we are sending the message to
	var targetedInstallations []*multidevice.Installation

	s.mutex.Lock()
	defer s.mutex.Unlock()

	if len(installations) == 0 {
		// We don't have any, send a message with DH
		logger.Debug("no installations, sending to all devices")
		encryptedPayload, err := s.EncryptPayloadWithDH(theirIdentityKey, payload)
		return encryptedPayload, targetedInstallations, err
	}

	theirIdentityKeyC := crypto.CompressPubkey(theirIdentityKey)
	response := make(map[string]*EncryptedMessageProtocol)

	for _, installation := range installations {
		installationID := installation.ID
		ilogger := logger.With(zap.String("installation-id", installationID))
		ilogger.Debug("processing installation")
		// Never encrypt to our own installation.
		if s.config.InstallationID == installationID {
			continue
		}

		bundle, err := s.persistence.GetPublicBundle(theirIdentityKey, []*multidevice.Installation{installation})
		if err != nil {
			return nil, nil, err
		}

		// See if a session is there already
		drInfo, err := s.persistence.GetAnyRatchetInfo(theirIdentityKeyC, installationID)
		if err != nil {
			return nil, nil, err
		}

		targetedInstallations = append(targetedInstallations, installation)

		if drInfo != nil {
			ilogger.Debug("found DR info for installation")
			encryptedPayload, drHeader, err := s.encryptUsingDR(theirIdentityKey, drInfo, payload)
			if err != nil {
				return nil, nil, err
			}

			dmp := EncryptedMessageProtocol{
				Payload:  encryptedPayload,
				DRHeader: drHeader,
			}

			// Keep attaching the X3DH header until the exchange is
			// confirmed (EphemeralKey is cleared on confirmation).
			if drInfo.EphemeralKey != nil {
				dmp.X3DHHeader = &X3DHHeader{
					Key: drInfo.EphemeralKey,
					Id:  drInfo.BundleID,
				}
			}

			response[drInfo.InstallationID] = &dmp
			continue
		}

		theirSignedPreKeyContainer := bundle.GetSignedPreKeys()[installationID]

		// This should not be nil at this point
		if theirSignedPreKeyContainer == nil {
			ilogger.Warn("could not find DR info or bundle for installation")
			continue

		}

		ilogger.Debug("DR info not found, using bundle")

		theirSignedPreKey := theirSignedPreKeyContainer.GetSignedPreKey()

		// Active X3DH: derive a shared key against their signed pre-key.
		sharedKey, ourEphemeralKey, err := s.keyFromActiveX3DH(theirIdentityKeyC, theirSignedPreKey, myIdentityKey)
		if err != nil {
			return nil, nil, err
		}
		// NOTE(review): shadows the outer theirIdentityKeyC with the same value.
		theirIdentityKeyC := crypto.CompressPubkey(theirIdentityKey)
		ourEphemeralKeyC := crypto.CompressPubkey(ourEphemeralKey)

		err = s.persistence.AddRatchetInfo(sharedKey, theirIdentityKeyC, theirSignedPreKey, ourEphemeralKeyC, installationID)
		if err != nil {
			return nil, nil, err
		}

		x3dhHeader := &X3DHHeader{
			Key: ourEphemeralKeyC,
			Id:  theirSignedPreKey,
		}

		// Read back the ratchet info we just stored to build the session.
		drInfo, err = s.persistence.GetRatchetInfo(theirSignedPreKey, theirIdentityKeyC, installationID)
		if err != nil {
			return nil, nil, err
		}

		if drInfo != nil {
			encryptedPayload, drHeader, err := s.encryptUsingDR(theirIdentityKey, drInfo, payload)
			if err != nil {
				return nil, nil, err
			}

			dmp := &EncryptedMessageProtocol{
				Payload:    encryptedPayload,
				X3DHHeader: x3dhHeader,
				DRHeader:   drHeader,
			}

			response[drInfo.InstallationID] = dmp
		}
	}

	var installationIDs []string
	for _, i := range targetedInstallations {
		installationIDs = append(installationIDs, i.ID)
	}
	logger.Info(
		"built a message",
		zap.Strings("installation-ids", installationIDs),
	)

	return response, targetedInstallations, nil
}
|
||||
|
||||
// getNextHashRatchet derives the next hash ratchet key for groupID from the
// group's current key.
// NOTE(review): assumes GetCurrentKeyForGroup returns a usable (non-nil)
// key even when the group has none yet — confirm, otherwise GenerateNext
// is called on a nil receiver.
func (s *encryptor) getNextHashRatchet(groupID []byte) (*HashRatchetKeyCompatibility, error) {
	latestKey, err := s.persistence.GetCurrentKeyForGroup(groupID)
	if err != nil {
		return nil, err
	}
	return latestKey.GenerateNext()
}
|
||||
|
||||
// GenerateHashRatchetKey generates and stores a new hash ratchet key for the
// given group ID, returning the key together with any persistence error.
func (s *encryptor) GenerateHashRatchetKey(groupID []byte) (*HashRatchetKeyCompatibility, error) {

	key, err := s.getNextHashRatchet(groupID)
	if err != nil {
		return nil, err
	}

	return key, s.persistence.SaveHashRatchetKey(key)
}
|
||||
|
||||
// EncryptHashRatchetPayload returns a new EncryptedMessageProtocol with a
// given payload encrypted, given a group's hash ratchet key.
func (s *encryptor) EncryptHashRatchetPayload(ratchet *HashRatchetKeyCompatibility, payload []byte) (map[string]*EncryptedMessageProtocol, error) {
	logger := s.logger.With(
		zap.String("site", "EncryptHashRatchetPayload"),
		zap.Any("group-id", ratchet.GroupID),
		zap.Any("key-id", ratchet.keyID))

	s.mutex.Lock()
	defer s.mutex.Unlock()

	logger.Debug("encrypting hash ratchet message")
	encryptedPayload, newSeqNo, err := s.EncryptWithHR(ratchet, payload)
	if err != nil {
		return nil, err
	}

	keyID, err := ratchet.GetKeyID()
	if err != nil {
		return nil, err
	}

	// The header carries both the new key ID and the deprecated numeric ID
	// so older clients can still locate the key.
	dmp := &EncryptedMessageProtocol{
		HRHeader: &HRHeader{
			DeprecatedKeyId: ratchet.DeprecatedKeyID(),
			GroupId:         ratchet.GroupID,
			KeyId:           keyID,
			SeqNo:           newSeqNo,
		},
		Payload: encryptedPayload,
	}

	// Hash-ratchet messages are not per-installation: use the catch-all ID.
	response := make(map[string]*EncryptedMessageProtocol)
	response[noInstallationID] = dmp
	return response, err
}
|
||||
|
||||
func samePublicKeys(pubKey1, pubKey2 ecdsa.PublicKey) bool {
|
||||
return pubKey1.X.Cmp(pubKey2.X) == 0 && pubKey1.Y.Cmp(pubKey2.Y) == 0
|
||||
}
|
||||
|
||||
// EncryptWithHR encrypts payload under the group's hash ratchet: it loads
// the latest cached ratchet state, advances it by one Keccak256 step, uses
// the new hash as the symmetric key, persists the advanced state and
// returns the ciphertext together with the new sequence number.
func (s *encryptor) EncryptWithHR(ratchet *HashRatchetKeyCompatibility, payload []byte) ([]byte, uint32, error) {
	hrCache, err := s.persistence.GetHashRatchetCache(ratchet, 0) // Get latest seqNo

	if err != nil {
		return nil, 0, err
	}
	if hrCache == nil {
		return nil, 0, ErrNoEncryptionKey
	}

	// First use of this key: no hash yet, so start ratcheting from the key
	// itself.
	var dbHash []byte
	if len(hrCache.Hash) == 0 {
		dbHash = hrCache.Key
	} else {
		dbHash = hrCache.Hash
	}

	// Advance the ratchet one step; the new hash is the symmetric key.
	hash := crypto.Keccak256Hash(dbHash)
	encryptedPayload, err := crypto.EncryptSymmetric(hash.Bytes(), payload)
	if err != nil {
		return nil, 0, err
	}
	newSeqNo := hrCache.SeqNo + 1
	// Persist the advanced state so the next encryption continues the chain.
	err = s.persistence.SaveHashRatchetKeyHash(ratchet, hash.Bytes(), newSeqNo)
	if err != nil {
		return nil, 0, err
	}

	return encryptedPayload, newSeqNo, nil
}
|
||||
|
||||
// DecryptWithHR decrypts a hash-ratchet message at position seqNo,
// fast-forwarding (and persisting) the ratchet hash chain when our cached
// state is behind the sender's. seqNo 0 marks a key-exchange message whose
// payload is returned as-is.
func (s *encryptor) DecryptWithHR(ratchet *HashRatchetKeyCompatibility, seqNo uint32, payload []byte) ([]byte, error) {
	// Key exchange message, nothing to decrypt
	if seqNo == 0 {
		return payload, nil
	}

	hrCache, err := s.persistence.GetHashRatchetCache(ratchet, seqNo)
	if err != nil {
		return nil, err
	}

	if hrCache == nil {
		return nil, ErrHashRatchetGroupIDNotFound
	}

	// Handle messages with seqNo less than the one in db
	// 1. Check cache. If present for a particular seqNo, all good
	// 2. Otherwise, get the latest one for that keyId
	// 3. Every time the key is generated, it has to be saved in the cache along with the hash
	var hash []byte = hrCache.Hash
	if hrCache.SeqNo == seqNo {
		// We already have the hash for this seqNo
		hash = hrCache.Hash
	} else {
		if hrCache.SeqNo == 0 {
			// No cache records found for this keyId
			hash = hrCache.Key
		}
		// We should not have "holes" in seq numbers,
		// so a case when hrCache.SeqNo > seqNo shouldn't occur
		if seqNo-hrCache.SeqNo > maxHashRatchetSeqNoDelta {
			return nil, ErrHashRatchetSeqNoTooHigh
		}
		// Fast-forward: hash once per missing step, persisting each
		// intermediate state so later messages find it cached.
		for i := hrCache.SeqNo; i < seqNo; i++ {
			hash = crypto.Keccak256Hash(hash).Bytes()
			err := s.persistence.SaveHashRatchetKeyHash(ratchet, hash, i+1)
			if err != nil {
				return nil, err
			}
		}
	}

	decryptedPayload, err := crypto.DecryptSymmetric(hash, payload)

	if err != nil {
		s.logger.Error("failed to decrypt hash", zap.Error(err))
		return nil, err
	}
	return decryptedPayload, nil
}
|
||||
164
vendor/github.com/status-im/status-go/protocol/encryption/helpers.go
generated
vendored
Normal file
164
vendor/github.com/status-im/status-go/protocol/encryption/helpers.go
generated
vendored
Normal file
@@ -0,0 +1,164 @@
|
||||
package encryption
|
||||
|
||||
import (
|
||||
"crypto/aes"
|
||||
"crypto/cipher"
|
||||
"crypto/ecdsa"
|
||||
"crypto/rand"
|
||||
"encoding/binary"
|
||||
"errors"
|
||||
"io"
|
||||
"time"
|
||||
|
||||
"github.com/status-im/status-go/eth-node/crypto"
|
||||
"github.com/status-im/status-go/eth-node/crypto/ecies"
|
||||
)
|
||||
|
||||
const keyBumpValue = uint64(10)
|
||||
|
||||
// GetCurrentTime64 returns the current unix time in milliseconds
|
||||
func GetCurrentTime() uint64 {
|
||||
return (uint64)(time.Now().UnixNano() / int64(time.Millisecond))
|
||||
}
|
||||
|
||||
// bumpKeyID takes a timestampID and returns its value incremented by the keyBumpValue
|
||||
func bumpKeyID(timestampID uint64) uint64 {
|
||||
return timestampID + keyBumpValue
|
||||
}
|
||||
|
||||
func generateHashRatchetKeyID(groupID []byte, timestamp uint64, keyBytes []byte) []byte {
|
||||
var keyMaterial []byte
|
||||
|
||||
keyMaterial = append(keyMaterial, groupID...)
|
||||
|
||||
timestampBytes := make([]byte, 8) // 8 bytes for a uint64
|
||||
binary.LittleEndian.PutUint64(timestampBytes, timestamp)
|
||||
keyMaterial = append(keyMaterial, timestampBytes...)
|
||||
|
||||
keyMaterial = append(keyMaterial, keyBytes...)
|
||||
|
||||
return crypto.Keccak256(keyMaterial)
|
||||
}
|
||||
|
||||
// publicKeyMostRelevantBytes maps a public key to the uint32 bucket used
// for RekeyGroup.Keys lookups: four bytes of the serialized key starting
// right after the first (format prefix) byte, read little-endian.
func publicKeyMostRelevantBytes(key *ecdsa.PublicKey) uint32 {

	keyBytes := crypto.FromECDSAPub(key)

	return binary.LittleEndian.Uint32(keyBytes[1:5])
}
|
||||
|
||||
func encrypt(plaintext []byte, key []byte, reader io.Reader) ([]byte, error) {
|
||||
c, err := aes.NewCipher(key)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
gcm, err := cipher.NewGCM(c)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
nonce := make([]byte, gcm.NonceSize())
|
||||
if _, err = io.ReadFull(reader, nonce); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return gcm.Seal(nonce, nonce, plaintext, nil), nil
|
||||
}
|
||||
|
||||
// generateSharedKey derives a 16-byte ECIES shared secret between
// privateKey and publicKey; both parties derive the same value from their
// respective key halves.
func generateSharedKey(privateKey *ecdsa.PrivateKey, publicKey *ecdsa.PublicKey) ([]byte, error) {

	const encryptedPayloadKeyLength = 16

	return ecies.ImportECDSA(privateKey).GenerateShared(
		ecies.ImportECDSAPublic(publicKey),
		encryptedPayloadKeyLength,
		encryptedPayloadKeyLength,
	)
}
|
||||
|
||||
// buildGroupRekeyMessage encrypts keyMaterial once per recipient public key
// and packs the copies into a RekeyGroup. Each copy is filed under a 4-byte
// bucket derived from the recipient key; colliding buckets concatenate
// their entries, so a recipient may need to try several slots on decrypt.
// groupID is accepted for the signature but not referenced in the body.
func buildGroupRekeyMessage(privateKey *ecdsa.PrivateKey, groupID []byte, timestamp uint64, keyMaterial []byte, keys []*ecdsa.PublicKey) (*RekeyGroup, error) {

	message := &RekeyGroup{
		Timestamp: timestamp,
	}

	message.Keys = make(map[uint32][]byte)

	for _, k := range keys {

		// Shared secret between our private key and this recipient.
		sharedKey, err := generateSharedKey(privateKey, k)
		if err != nil {
			return nil, err
		}

		encryptedKey, err := encrypt(keyMaterial, sharedKey, rand.Reader)
		if err != nil {
			return nil, err
		}

		kBytes := publicKeyMostRelevantBytes(k)

		// On bucket collision, append the new ciphertext after the
		// existing one(s).
		if message.Keys[kBytes] == nil {
			message.Keys[kBytes] = encryptedKey
		} else {
			message.Keys[kBytes] = append(message.Keys[kBytes], encryptedKey...)
		}
	}

	return message, nil
}
|
||||
|
||||
const nonceLength = 12
|
||||
|
||||
func decrypt(cyphertext []byte, key []byte) ([]byte, error) {
|
||||
if len(cyphertext) < nonceLength {
|
||||
return nil, errors.New("invalid cyphertext length")
|
||||
}
|
||||
|
||||
c, err := aes.NewCipher(key)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
gcm, err := cipher.NewGCM(c)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
nonce := cyphertext[:nonceLength]
|
||||
return gcm.Open(nil, nonce, cyphertext[nonceLength:], nil)
|
||||
}
|
||||
|
||||
// keySize is the size of one encrypted key entry in RekeyGroup.Keys
// (presumably nonce (12) + key material + GCM tag (16) — confirm against
// buildGroupRekeyMessage's keyMaterial length).
const keySize = 60

// decryptGroupRekeyMessage extracts our copy of the group key from a
// RekeyGroup. It looks up the bucket for our public key, then tries each
// keySize-d slot (buckets can hold several colliding entries) until one
// decrypts. Returns (nil, nil) when no bucket exists or no slot decrypts.
func decryptGroupRekeyMessage(privateKey *ecdsa.PrivateKey, publicKey *ecdsa.PublicKey, message *RekeyGroup) ([]byte, error) {
	kBytes := publicKeyMostRelevantBytes(&privateKey.PublicKey)
	if message.Keys == nil || message.Keys[kBytes] == nil {
		return nil, nil
	}

	sharedKey, err := generateSharedKey(privateKey, publicKey)
	if err != nil {
		return nil, err
	}

	keys := message.Keys[kBytes]

	nKeys := len(keys) / keySize

	var decryptedKey []byte
	for i := 0; i < nKeys; i++ {

		encryptedKey := keys[i*keySize : i*keySize+keySize]
		decryptedKey, err = decrypt(encryptedKey, sharedKey)
		if err != nil {
			// Not our slot (collision); try the next one.
			continue
		} else {
			break
		}

	}

	return decryptedKey, nil
}
|
||||
712
vendor/github.com/status-im/status-go/protocol/encryption/migrations/migrations.go
generated
vendored
Normal file
712
vendor/github.com/status-im/status-go/protocol/encryption/migrations/migrations.go
generated
vendored
Normal file
@@ -0,0 +1,712 @@
|
||||
// Code generated by go-bindata. DO NOT EDIT.
|
||||
// sources:
|
||||
// 1536754952_initial_schema.down.sql (83B)
|
||||
// 1536754952_initial_schema.up.sql (962B)
|
||||
// 1539249977_update_ratchet_info.down.sql (311B)
|
||||
// 1539249977_update_ratchet_info.up.sql (368B)
|
||||
// 1540715431_add_version.down.sql (127B)
|
||||
// 1540715431_add_version.up.sql (265B)
|
||||
// 1541164797_add_installations.down.sql (26B)
|
||||
// 1541164797_add_installations.up.sql (216B)
|
||||
// 1558084410_add_secret.down.sql (56B)
|
||||
// 1558084410_add_secret.up.sql (301B)
|
||||
// 1558588866_add_version.down.sql (47B)
|
||||
// 1558588866_add_version.up.sql (57B)
|
||||
// 1559627659_add_contact_code.down.sql (32B)
|
||||
// 1559627659_add_contact_code.up.sql (198B)
|
||||
// 1561368210_add_installation_metadata.down.sql (35B)
|
||||
// 1561368210_add_installation_metadata.up.sql (267B)
|
||||
// 1632236298_add_communities.down.sql (151B)
|
||||
// 1632236298_add_communities.up.sql (584B)
|
||||
// 1636536507_add_index_bundles.up.sql (347B)
|
||||
// doc.go (377B)
|
||||
|
||||
package migrations
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"compress/gzip"
|
||||
"crypto/sha256"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
func bindataRead(data []byte, name string) ([]byte, error) {
|
||||
gz, err := gzip.NewReader(bytes.NewBuffer(data))
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("read %q: %w", name, err)
|
||||
}
|
||||
|
||||
var buf bytes.Buffer
|
||||
_, err = io.Copy(&buf, gz)
|
||||
clErr := gz.Close()
|
||||
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("read %q: %w", name, err)
|
||||
}
|
||||
if clErr != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return buf.Bytes(), nil
|
||||
}
|
||||
|
||||
// asset holds the decompressed bytes of one embedded file together with its
// synthesized file info and a SHA-256 digest of the content.
type asset struct {
	bytes  []byte
	info   os.FileInfo
	digest [sha256.Size]byte
}

// bindataFileInfo implements os.FileInfo for embedded assets.
type bindataFileInfo struct {
	name    string
	size    int64
	mode    os.FileMode
	modTime time.Time
}

// Name returns the base name of the embedded file.
func (fi bindataFileInfo) Name() string {
	return fi.name
}

// Size returns the length in bytes of the embedded file.
func (fi bindataFileInfo) Size() int64 {
	return fi.size
}

// Mode returns the file mode bits recorded for the embedded file.
func (fi bindataFileInfo) Mode() os.FileMode {
	return fi.mode
}

// ModTime returns the recorded modification time.
func (fi bindataFileInfo) ModTime() time.Time {
	return fi.modTime
}

// IsDir always reports false: embedded assets are regular files.
func (fi bindataFileInfo) IsDir() bool {
	return false
}

// Sys returns nil; there is no underlying data source.
func (fi bindataFileInfo) Sys() interface{} {
	return nil
}
|
||||
|
||||
var __1536754952_initial_schemaDownSql = []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x72\x09\xf2\x0f\x50\x08\x71\x74\xf2\x71\x55\x28\x4e\x2d\x2e\xce\xcc\xcf\x2b\xb6\xe6\x42\x12\x4c\x2a\xcd\x4b\xc9\x49\x45\x15\xcb\x4e\xad\x44\x15\x28\x4a\x2c\x49\xce\x48\x2d\x89\xcf\xcc\x4b\xcb\xb7\xe6\x02\x04\x00\x00\xff\xff\x72\x61\x3f\x92\x53\x00\x00\x00")
|
||||
|
||||
// _1536754952_initial_schemaDownSqlBytes gunzips the embedded
// 1536754952_initial_schema.down.sql migration.
func _1536754952_initial_schemaDownSqlBytes() ([]byte, error) {
	return bindataRead(
		__1536754952_initial_schemaDownSql,
		"1536754952_initial_schema.down.sql",
	)
}

// _1536754952_initial_schemaDownSql wraps the decompressed migration in an
// asset with synthesized file info and the generated content digest.
func _1536754952_initial_schemaDownSql() (*asset, error) {
	bytes, err := _1536754952_initial_schemaDownSqlBytes()
	if err != nil {
		return nil, err
	}

	info := bindataFileInfo{name: "1536754952_initial_schema.down.sql", size: 83, mode: os.FileMode(0644), modTime: time.Unix(1704739012, 0)}
	a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x44, 0xcf, 0x76, 0x71, 0x1f, 0x5e, 0x9a, 0x43, 0xd8, 0xcd, 0xb8, 0xc3, 0x70, 0xc3, 0x7f, 0xfc, 0x90, 0xb4, 0x25, 0x1e, 0xf4, 0x66, 0x20, 0xb8, 0x33, 0x7e, 0xb0, 0x76, 0x1f, 0xc, 0xc0, 0x75}}
	return a, nil
}
|
||||
|
||||
var __1536754952_initial_schemaUpSql = []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x8c\x52\xc1\x8e\x9b\x30\x10\xbd\xe7\x2b\xe6\x98\x48\x39\xf4\xde\x13\xb0\x13\x84\x4a\xcd\xd6\x0b\x52\xf7\x64\x79\xe3\x69\xb0\x16\x1b\x64\x3b\xab\xe6\xef\x2b\x20\xa5\xb8\x65\xdb\xde\x98\xc7\x9b\x99\x37\xef\x39\xe3\x98\xd4\x08\x75\x92\x96\x08\x9e\xbc\xd7\xbd\xf5\xb0\xdf\x01\xa8\xd6\x41\x5a\x56\xe9\x71\xfa\xf6\x62\xb8\xbe\x74\xfa\x1c\x43\x4e\xbf\xc9\x40\x0b\xe6\xfa\x3e\x88\x73\x2b\xb5\x15\xaf\x74\x5b\x60\x4f\x56\xfd\x1d\xb6\x50\xb0\x1a\x73\xe4\xd3\x14\x3a\xbf\x6d\xd0\x57\x70\x44\xf7\x81\x86\x75\x3d\x58\x58\x97\x5a\x4d\x13\x80\x55\x35\xb0\xa6\x2c\xe1\x91\x17\x9f\x13\xfe\x0c\x9f\xf0\x79\xfc\xdf\xb0\xe2\x4b\x83\x7b\xad\x0e\x50\x31\xc8\x2a\x76\x2a\x8b\xac\x06\x8e\x8f\x65\x92\xe1\xee\xf0\x71\xb7\x8b\x3c\x7a\xa5\xdb\xec\xcf\xec\xc7\x22\x71\x59\x30\x0e\x35\xfe\x22\xec\xd5\xac\x75\x18\xf2\x5e\x5e\x68\x9b\x3f\x8b\x80\xfd\xbd\xef\xb8\x66\xff\xa7\xae\x97\xab\x55\x1d\xcd\xd2\xb4\x22\x1b\x74\xd8\x58\xa4\xad\x0f\xb2\xeb\x64\xd0\xbd\x15\x5a\x41\x8d\x5f\xeb\x88\x70\x8f\x34\x0e\x4a\x5f\x2c\x29\x31\xb8\x0d\xf5\x6b\x3b\x23\xa1\x45\xce\x2a\x8e\x63\x7b\xd0\x86\x7c\x90\x66\x80\x86\x3d\x15\x39\xc3\x07\x48\x8b\x7c\xf4\x26\xda\x4c\xdf\x07\xed\x48\x41\x5a\x55\x25\x26\x0c\x1e\xf0\x94\x34\x65\x0d\x1f\xfe\xbc\xd5\xc9\x70\x6e\x29\x08\x6d\xbf\xf5\xd3\xc1\xf3\xf1\xe2\xf7\xac\xa7\xb1\x43\x4b\x86\x9c\xec\xa2\x93\xde\x77\xc8\xdf\x8c\xa1\xe0\xde\x4b\xf6\x9f\x06\xde\xdf\xd3\xa2\xe8\xb8\xec\xda\x0c\x72\x6c\x39\x55\x1c\x8b\x9c\x4d\x16\xfe\x6a\x3c\x00\xc7\x13\x72\x64\x19\x3e\xfd\x4c\x77\x1f\x47\x71\x18\xad\xf9\x11\x00\x00\xff\xff\xa9\x50\xa8\xb2\xc2\x03\x00\x00")
|
||||
|
||||
// _1536754952_initial_schemaUpSqlBytes gunzips the embedded
// 1536754952_initial_schema.up.sql migration.
func _1536754952_initial_schemaUpSqlBytes() ([]byte, error) {
	return bindataRead(
		__1536754952_initial_schemaUpSql,
		"1536754952_initial_schema.up.sql",
	)
}

// _1536754952_initial_schemaUpSql wraps the decompressed migration in an
// asset with synthesized file info and the generated content digest.
func _1536754952_initial_schemaUpSql() (*asset, error) {
	bytes, err := _1536754952_initial_schemaUpSqlBytes()
	if err != nil {
		return nil, err
	}

	info := bindataFileInfo{name: "1536754952_initial_schema.up.sql", size: 962, mode: os.FileMode(0644), modTime: time.Unix(1704739012, 0)}
	a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xea, 0x90, 0x5a, 0x59, 0x3e, 0x3, 0xe2, 0x3c, 0x81, 0x42, 0xcd, 0x4c, 0x9a, 0xe8, 0xda, 0x93, 0x2b, 0x70, 0xa4, 0xd5, 0x29, 0x3e, 0xd5, 0xc9, 0x27, 0xb6, 0xb7, 0x65, 0xff, 0x0, 0xcb, 0xde}}
	return a, nil
}
|
||||
|
||||
var __1539249977_update_ratchet_infoDownSql = []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x6c\x8f\x41\x4b\xc4\x30\x10\x85\xef\xf9\x15\xef\xd8\xc2\x9e\xbc\xee\xa9\x8d\x53\x29\x86\x64\x8d\x29\xe8\x29\xd4\xed\xe8\x06\xdb\xec\xd2\x46\xa1\xff\x5e\x22\x52\x59\xf4\x3a\xdf\xf7\x78\x6f\x6e\xad\x39\xc0\x55\xb5\x22\xcc\x7d\x3a\x9e\x38\xf9\x10\x5f\xcf\xfe\xf3\x66\x2f\x84\xb4\x54\x39\xfa\x07\xa3\x10\xc0\xcb\x47\x1c\x46\xf6\x61\x40\xad\x4c\x0d\x6d\x1c\x74\xa7\xd4\x4e\x00\x7c\x39\xf1\xc4\x73\x3f\xfa\x77\x5e\xbf\x71\xbe\x86\x81\x63\x0a\x69\xfd\xeb\x2f\xeb\x34\x71\x9a\xc3\x71\xf3\xaf\x70\x88\x4b\xea\xc7\xb1\x4f\xe1\x1c\x73\x9f\xa3\x27\x77\x25\x74\xba\x7d\xe8\xa8\xd8\x16\xed\xb6\xae\x12\x46\x43\x1a\xdd\xa8\x56\x3a\x58\x3a\xa8\x4a\x52\x8e\x34\xc6\x52\x7b\xa7\x71\x4f\xcf\xf8\x0d\x96\xb0\xd4\x90\x25\x2d\xe9\xf1\xe7\xc1\xa5\x58\xc2\x5b\xe4\xc1\x5f\x66\xce\xf3\x4a\x51\xee\xc5\x57\x00\x00\x00\xff\xff\x69\x51\x9b\xb4\x37\x01\x00\x00")
|
||||
|
||||
func _1539249977_update_ratchet_infoDownSqlBytes() ([]byte, error) {
|
||||
return bindataRead(
|
||||
__1539249977_update_ratchet_infoDownSql,
|
||||
"1539249977_update_ratchet_info.down.sql",
|
||||
)
|
||||
}
|
||||
|
||||
func _1539249977_update_ratchet_infoDownSql() (*asset, error) {
|
||||
bytes, err := _1539249977_update_ratchet_infoDownSqlBytes()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
info := bindataFileInfo{name: "1539249977_update_ratchet_info.down.sql", size: 311, mode: os.FileMode(0644), modTime: time.Unix(1704739012, 0)}
|
||||
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x1, 0xa4, 0xeb, 0xa0, 0xe6, 0xa0, 0xd4, 0x48, 0xbb, 0xad, 0x6f, 0x7d, 0x67, 0x8c, 0xbd, 0x25, 0xde, 0x1f, 0x73, 0x9a, 0xbb, 0xa8, 0xc9, 0x30, 0xb7, 0xa9, 0x7c, 0xaf, 0xb5, 0x1, 0x61, 0xdd}}
|
||||
return a, nil
|
||||
}
|
||||
|
||||
var __1539249977_update_ratchet_infoUpSql = []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x6c\x8f\x41\x4f\x84\x30\x10\x85\xef\xfd\x15\x73\x84\x84\x93\x57\x4e\x50\x07\x43\xac\xed\x5a\x4b\xa2\xa7\x06\x97\xd1\x6d\x16\xca\x86\x56\x13\xfe\xbd\xa9\x31\x28\xea\xf5\xbd\x6f\xde\x7b\x73\x8d\x02\x0d\x42\xa3\xd5\x1d\x04\x0a\xc1\xcd\x3e\x94\xec\xa7\x7a\xa6\x35\x29\x5a\x1d\xc0\x54\xb5\x40\x58\xfa\x78\x3c\x51\xb4\xce\xbf\xcc\x25\x63\x5c\x63\x65\xf0\x1f\xcf\xbe\x5f\x41\xc6\x00\x9e\xdf\xfc\x30\x92\x75\x03\xd4\x42\xd5\x20\x95\x01\xd9\x09\x51\x30\x00\xba\x9c\x68\xa2\xa5\x1f\xed\x99\xd6\x4f\x3b\xa9\x6e\x20\x1f\x5d\x5c\xff\xf2\x61\x9d\x26\x8a\x8b\x3b\x6e\xfc\xce\x76\x3e\xc4\x7e\x1c\xfb\xe8\x66\x9f\xfa\x0c\x3e\x9a\x1d\xd0\xc9\xf6\xbe\xc3\x6c\x5b\x54\x6c\x5d\xc5\xef\xe3\x1c\x94\x04\xae\x64\x23\x5a\x6e\x40\xe3\x41\x54\x1c\x53\x46\xa3\x34\xb6\x37\x12\x6e\xf1\x09\xbe\x93\x72\xd0\xd8\xa0\x46\xc9\xf1\xe1\xeb\xe3\x90\x05\xf7\xea\x69\xb0\x97\x85\xd2\xde\x9c\xe5\x25\xfb\x08\x00\x00\xff\xff\xb6\x31\x2b\x32\x70\x01\x00\x00")
|
||||
|
||||
func _1539249977_update_ratchet_infoUpSqlBytes() ([]byte, error) {
|
||||
return bindataRead(
|
||||
__1539249977_update_ratchet_infoUpSql,
|
||||
"1539249977_update_ratchet_info.up.sql",
|
||||
)
|
||||
}
|
||||
|
||||
func _1539249977_update_ratchet_infoUpSql() (*asset, error) {
|
||||
bytes, err := _1539249977_update_ratchet_infoUpSqlBytes()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
info := bindataFileInfo{name: "1539249977_update_ratchet_info.up.sql", size: 368, mode: os.FileMode(0644), modTime: time.Unix(1704739012, 0)}
|
||||
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xc, 0x8e, 0xbf, 0x6f, 0xa, 0xc0, 0xe1, 0x3c, 0x42, 0x28, 0x88, 0x1d, 0xdb, 0xba, 0x1c, 0x83, 0xec, 0xba, 0xd3, 0x5f, 0x5c, 0x77, 0x5e, 0xa7, 0x46, 0x36, 0xec, 0x69, 0xa, 0x4b, 0x17, 0x79}}
|
||||
return a, nil
|
||||
}
|
||||
|
||||
var __1540715431_add_versionDownSql = []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x72\xf4\x09\x71\x0d\x52\x08\x71\x74\xf2\x71\x55\xc8\x4e\xad\x2c\x56\x70\x09\xf2\x0f\x50\x70\xf6\xf7\x09\xf5\xf5\x53\x28\x4e\x2d\x2e\xce\xcc\xcf\x8b\xcf\x4c\xb1\xe6\x42\x56\x08\x15\x47\x55\x0c\xd2\x1d\x9f\x9c\x5f\x9a\x57\x82\xaa\x38\xa9\x34\x2f\x25\x27\x15\x55\x6d\x59\x6a\x11\xc8\x00\x6b\x2e\x40\x00\x00\x00\xff\xff\xda\x5d\x80\x2d\x7f\x00\x00\x00")
|
||||
|
||||
func _1540715431_add_versionDownSqlBytes() ([]byte, error) {
|
||||
return bindataRead(
|
||||
__1540715431_add_versionDownSql,
|
||||
"1540715431_add_version.down.sql",
|
||||
)
|
||||
}
|
||||
|
||||
func _1540715431_add_versionDownSql() (*asset, error) {
|
||||
bytes, err := _1540715431_add_versionDownSqlBytes()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
info := bindataFileInfo{name: "1540715431_add_version.down.sql", size: 127, mode: os.FileMode(0644), modTime: time.Unix(1704739012, 0)}
|
||||
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xf5, 0x9, 0x4, 0xe3, 0x76, 0x2e, 0xb8, 0x9, 0x23, 0xf0, 0x70, 0x93, 0xc4, 0x50, 0xe, 0x9d, 0x84, 0x22, 0x8c, 0x94, 0xd3, 0x24, 0x9, 0x9a, 0xc1, 0xa1, 0x48, 0x45, 0xfd, 0x40, 0x6e, 0xe6}}
|
||||
return a, nil
|
||||
}
|
||||
|
||||
var __1540715431_add_versionUpSql = []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x8c\xcd\xb1\x0e\x02\x21\x0c\xc6\xf1\xdd\xa7\xf8\x1e\xc1\xdd\x09\xa4\x67\x4c\x7a\x90\x90\x32\x93\xe8\x31\x5c\x54\x2e\x8a\x98\xf8\xf6\x06\xe3\xc2\xa2\xae\x6d\xff\xbf\x1a\x62\x12\xc2\xe0\xdd\x88\x53\x7a\x96\xcd\x4a\xb1\x90\x87\x28\xcd\xf4\x9e\x40\x19\x83\xad\xe3\x30\x5a\x94\x74\x8d\xb9\x5e\xb0\xb7\x42\x3b\xf2\xb0\x4e\x60\x03\x33\x0c\x0d\x2a\xb0\x60\xfd\xab\x2f\x65\x5e\x72\x9c\x27\x68\x76\xba\x3f\xfe\x2c\xbb\xa0\x01\xf1\xb8\xd4\x7c\xff\xfb\xe7\xa1\xe6\xe9\x9c\x3a\xe5\x91\x6e\x4d\xfe\x4a\xbc\x02\x00\x00\xff\xff\x0e\x27\x2c\x52\x09\x01\x00\x00")
|
||||
|
||||
func _1540715431_add_versionUpSqlBytes() ([]byte, error) {
|
||||
return bindataRead(
|
||||
__1540715431_add_versionUpSql,
|
||||
"1540715431_add_version.up.sql",
|
||||
)
|
||||
}
|
||||
|
||||
func _1540715431_add_versionUpSql() (*asset, error) {
|
||||
bytes, err := _1540715431_add_versionUpSqlBytes()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
info := bindataFileInfo{name: "1540715431_add_version.up.sql", size: 265, mode: os.FileMode(0644), modTime: time.Unix(1704739012, 0)}
|
||||
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xc7, 0x4c, 0x36, 0x96, 0xdf, 0x16, 0x10, 0xa6, 0x27, 0x1a, 0x79, 0x8b, 0x42, 0x83, 0x23, 0xc, 0x7e, 0xb6, 0x3d, 0x2, 0xda, 0xa4, 0xb4, 0xd, 0x27, 0x55, 0xba, 0xdc, 0xb2, 0x88, 0x8f, 0xa6}}
|
||||
return a, nil
|
||||
}
|
||||
|
||||
var __1541164797_add_installationsDownSql = []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x72\x09\xf2\x0f\x50\x08\x71\x74\xf2\x71\x55\xc8\xcc\x2b\x2e\x49\xcc\xc9\x49\x2c\xc9\xcc\xcf\x2b\xb6\xe6\x02\x04\x00\x00\xff\xff\xd8\xbf\x14\x75\x1a\x00\x00\x00")
|
||||
|
||||
func _1541164797_add_installationsDownSqlBytes() ([]byte, error) {
|
||||
return bindataRead(
|
||||
__1541164797_add_installationsDownSql,
|
||||
"1541164797_add_installations.down.sql",
|
||||
)
|
||||
}
|
||||
|
||||
func _1541164797_add_installationsDownSql() (*asset, error) {
|
||||
bytes, err := _1541164797_add_installationsDownSqlBytes()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
info := bindataFileInfo{name: "1541164797_add_installations.down.sql", size: 26, mode: os.FileMode(0644), modTime: time.Unix(1704739012, 0)}
|
||||
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xf5, 0xfd, 0xe6, 0xd8, 0xca, 0x3b, 0x38, 0x18, 0xee, 0x0, 0x5f, 0x36, 0x9e, 0x1e, 0xd, 0x19, 0x3e, 0xb4, 0x73, 0x53, 0xe9, 0xa5, 0xac, 0xdd, 0xa1, 0x2f, 0xc7, 0x6c, 0xa8, 0xd9, 0xa, 0x88}}
|
||||
return a, nil
|
||||
}
|
||||
|
||||
var __1541164797_add_installationsUpSql = []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x5c\xce\xb1\x6a\xc3\x30\x14\x85\xe1\xdd\x4f\x71\x46\x1b\xbc\x74\xee\x24\xc9\xd7\x46\x70\xb9\x6a\x5d\x09\xba\x05\x05\x6b\x10\xd8\x4a\xc0\x5a\xf2\xf6\xc1\x43\x20\xce\xfc\x7f\x70\x8e\x99\x49\x79\x82\x57\x9a\x09\xb9\xec\x35\xae\x6b\xac\xf9\x56\x76\xa0\x6d\x80\xbc\xa4\x52\x73\x7d\x40\xb3\xd3\x10\xe7\x21\x81\xb9\x3f\xca\x1b\xbe\xe4\x05\x9e\xfe\xfd\x09\xd4\xbc\xa5\xbd\xc6\xed\x8e\x20\x7f\x76\x12\x1a\xa0\xed\x04\x2b\x67\x96\x4a\xbc\xae\x69\x81\x76\x8e\x49\x09\x06\x1a\x55\x60\x8f\xaf\x23\x06\xb1\xbf\x81\xda\xd7\x8b\xfe\x73\xb5\x83\x13\x18\x27\x23\x5b\xe3\x31\xd3\x0f\x2b\x43\x4d\xf7\xdd\x3c\x03\x00\x00\xff\xff\x28\x14\xac\x9d\xd8\x00\x00\x00")
|
||||
|
||||
func _1541164797_add_installationsUpSqlBytes() ([]byte, error) {
|
||||
return bindataRead(
|
||||
__1541164797_add_installationsUpSql,
|
||||
"1541164797_add_installations.up.sql",
|
||||
)
|
||||
}
|
||||
|
||||
func _1541164797_add_installationsUpSql() (*asset, error) {
|
||||
bytes, err := _1541164797_add_installationsUpSqlBytes()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
info := bindataFileInfo{name: "1541164797_add_installations.up.sql", size: 216, mode: os.FileMode(0644), modTime: time.Unix(1704739012, 0)}
|
||||
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x2d, 0x18, 0x26, 0xb8, 0x88, 0x47, 0xdb, 0x83, 0xcc, 0xb6, 0x9d, 0x1c, 0x1, 0xae, 0x2f, 0xde, 0x97, 0x82, 0x3, 0x30, 0xa8, 0x63, 0xa1, 0x78, 0x4b, 0xa5, 0x9, 0x8, 0x75, 0xa2, 0x57, 0x81}}
|
||||
return a, nil
|
||||
}
|
||||
|
||||
var __1558084410_add_secretDownSql = []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x72\x09\xf2\x0f\x50\x08\x71\x74\xf2\x71\x55\x28\x4e\x4d\x2e\x4a\x2d\x89\xcf\xcc\x2b\x2e\x49\xcc\xc9\x49\x2c\xc9\xcc\xcf\x8b\xcf\x4c\x29\xb6\xe6\xc2\x50\x53\x6c\xcd\x05\x08\x00\x00\xff\xff\xd3\xcd\x41\x83\x38\x00\x00\x00")
|
||||
|
||||
func _1558084410_add_secretDownSqlBytes() ([]byte, error) {
|
||||
return bindataRead(
|
||||
__1558084410_add_secretDownSql,
|
||||
"1558084410_add_secret.down.sql",
|
||||
)
|
||||
}
|
||||
|
||||
func _1558084410_add_secretDownSql() (*asset, error) {
|
||||
bytes, err := _1558084410_add_secretDownSqlBytes()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
info := bindataFileInfo{name: "1558084410_add_secret.down.sql", size: 56, mode: os.FileMode(0644), modTime: time.Unix(1704739012, 0)}
|
||||
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x49, 0xb, 0x65, 0xdf, 0x59, 0xbf, 0xe9, 0x5, 0x5b, 0x6f, 0xd5, 0x3a, 0xb7, 0x57, 0xe8, 0x78, 0x38, 0x73, 0x53, 0x57, 0xf7, 0x24, 0x4, 0xe4, 0xa2, 0x49, 0x22, 0xa2, 0xc6, 0xfd, 0x80, 0xa4}}
|
||||
return a, nil
|
||||
}
|
||||
|
||||
var __1558084410_add_secretUpSql = []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x74\x50\xcf\x0a\x82\x30\x1c\xbe\xef\x29\xbe\xa3\x82\x6f\xd0\x49\xc7\x4f\x19\xad\xdf\x6a\x4d\xc8\x93\x48\xf3\x30\x10\x83\xdc\xa5\xb7\x0f\x23\x45\xa1\xce\xdf\xff\x4f\x5a\xca\x1d\xc1\xe5\x85\x26\x4c\xfd\xfd\xd9\xc7\x09\x89\x00\x82\xef\xc7\x18\xe2\x0b\x85\x36\x05\xd8\x38\x70\xad\x35\xce\x56\x9d\x72\xdb\xe0\x48\x0d\x0c\x43\x1a\x2e\xb5\x92\x0e\xaa\x62\x63\x29\x13\xf8\x9a\xec\x65\x22\x3d\x08\xf1\x23\xaa\x0d\xe3\x14\xbb\x61\xe8\x62\x78\x8c\x6d\xf0\x4b\x34\x1c\xdd\xdc\xaa\xce\x36\x75\xda\xe0\xf7\xd6\x33\x58\xb3\xba\xd4\x94\x04\x9f\x6d\x79\xe9\x9f\x82\xa5\xb1\xa4\x2a\xfe\x4c\x48\x76\x7c\x4b\x25\x59\x62\x49\xd7\xe5\x8a\x15\x4f\xe7\x09\xef\x00\x00\x00\xff\xff\xa6\xbb\x2c\x23\x2d\x01\x00\x00")
|
||||
|
||||
func _1558084410_add_secretUpSqlBytes() ([]byte, error) {
|
||||
return bindataRead(
|
||||
__1558084410_add_secretUpSql,
|
||||
"1558084410_add_secret.up.sql",
|
||||
)
|
||||
}
|
||||
|
||||
func _1558084410_add_secretUpSql() (*asset, error) {
|
||||
bytes, err := _1558084410_add_secretUpSqlBytes()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
info := bindataFileInfo{name: "1558084410_add_secret.up.sql", size: 301, mode: os.FileMode(0644), modTime: time.Unix(1704739012, 0)}
|
||||
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xf5, 0x32, 0x36, 0x8e, 0x47, 0xb0, 0x8f, 0xc1, 0xc6, 0xf7, 0xc6, 0x9f, 0x2d, 0x44, 0x75, 0x2b, 0x26, 0xec, 0x6, 0xa0, 0x7b, 0xa5, 0xbd, 0xc8, 0x76, 0x8a, 0x82, 0x68, 0x2, 0x42, 0xb5, 0xf4}}
|
||||
return a, nil
|
||||
}
|
||||
|
||||
var __1558588866_add_versionDownSql = []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x72\xf4\x09\x71\x0d\x52\x08\x71\x74\xf2\x71\x55\xc8\xcc\x2b\x2e\x49\xcc\xc9\x49\x2c\xc9\xcc\xcf\x2b\x56\x70\x09\xf2\x0f\x50\x70\xf6\xf7\x09\xf5\xf5\x53\x28\x4b\x2d\x2a\xce\xcc\xcf\xb3\xe6\x02\x04\x00\x00\xff\xff\xdf\x6b\x9f\xbb\x2f\x00\x00\x00")
|
||||
|
||||
func _1558588866_add_versionDownSqlBytes() ([]byte, error) {
|
||||
return bindataRead(
|
||||
__1558588866_add_versionDownSql,
|
||||
"1558588866_add_version.down.sql",
|
||||
)
|
||||
}
|
||||
|
||||
func _1558588866_add_versionDownSql() (*asset, error) {
|
||||
bytes, err := _1558588866_add_versionDownSqlBytes()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
info := bindataFileInfo{name: "1558588866_add_version.down.sql", size: 47, mode: os.FileMode(0644), modTime: time.Unix(1704739012, 0)}
|
||||
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xde, 0x52, 0x34, 0x3c, 0x46, 0x4a, 0xf0, 0x72, 0x47, 0x6f, 0x49, 0x5c, 0xc7, 0xf9, 0x32, 0xce, 0xc4, 0x3d, 0xfd, 0x61, 0xa1, 0x8b, 0x8f, 0xf2, 0x31, 0x34, 0xde, 0x15, 0x49, 0xa6, 0xde, 0xb9}}
|
||||
return a, nil
|
||||
}
|
||||
|
||||
var __1558588866_add_versionUpSql = []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x72\xf4\x09\x71\x0d\x52\x08\x71\x74\xf2\x71\x55\xc8\xcc\x2b\x2e\x49\xcc\xc9\x49\x2c\xc9\xcc\xcf\x2b\x56\x70\x74\x71\x51\x28\x4b\x2d\x2a\xce\xcc\xcf\x53\xf0\xf4\x0b\x71\x75\x77\x0d\x52\x70\x71\x75\x73\x0c\xf5\x09\x51\x30\xb0\xe6\x02\x04\x00\x00\xff\xff\x14\x7b\x07\xb5\x39\x00\x00\x00")
|
||||
|
||||
func _1558588866_add_versionUpSqlBytes() ([]byte, error) {
|
||||
return bindataRead(
|
||||
__1558588866_add_versionUpSql,
|
||||
"1558588866_add_version.up.sql",
|
||||
)
|
||||
}
|
||||
|
||||
func _1558588866_add_versionUpSql() (*asset, error) {
|
||||
bytes, err := _1558588866_add_versionUpSqlBytes()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
info := bindataFileInfo{name: "1558588866_add_version.up.sql", size: 57, mode: os.FileMode(0644), modTime: time.Unix(1704739012, 0)}
|
||||
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x2a, 0xea, 0x64, 0x39, 0x61, 0x20, 0x83, 0x83, 0xb, 0x2e, 0x79, 0x64, 0xb, 0x53, 0xfa, 0xfe, 0xc6, 0xf7, 0x67, 0x42, 0xd3, 0x4f, 0xdc, 0x7e, 0x30, 0x32, 0xe8, 0x14, 0x41, 0xe9, 0xe7, 0x3b}}
|
||||
return a, nil
|
||||
}
|
||||
|
||||
var __1559627659_add_contact_codeDownSql = []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x72\x09\xf2\x0f\x50\x08\x71\x74\xf2\x71\x55\x48\xce\xcf\x2b\x49\x4c\x2e\x89\x4f\xce\x4f\x49\x8d\x4f\xce\xcf\x4b\xcb\x4c\xb7\xe6\x02\x04\x00\x00\xff\xff\x73\x7b\x50\x80\x20\x00\x00\x00")
|
||||
|
||||
func _1559627659_add_contact_codeDownSqlBytes() ([]byte, error) {
|
||||
return bindataRead(
|
||||
__1559627659_add_contact_codeDownSql,
|
||||
"1559627659_add_contact_code.down.sql",
|
||||
)
|
||||
}
|
||||
|
||||
func _1559627659_add_contact_codeDownSql() (*asset, error) {
|
||||
bytes, err := _1559627659_add_contact_codeDownSqlBytes()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
info := bindataFileInfo{name: "1559627659_add_contact_code.down.sql", size: 32, mode: os.FileMode(0644), modTime: time.Unix(1704739012, 0)}
|
||||
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x5d, 0x64, 0x6d, 0xce, 0x24, 0x42, 0x20, 0x8d, 0x4f, 0x37, 0xaa, 0x9d, 0xc, 0x57, 0x98, 0xc1, 0xd1, 0x1a, 0x34, 0xcd, 0x9f, 0x8f, 0x34, 0x86, 0xb3, 0xd3, 0xdc, 0xf1, 0x7d, 0xe5, 0x1b, 0x6e}}
|
||||
return a, nil
|
||||
}
|
||||
|
||||
var __1559627659_add_contact_codeUpSql = []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x6c\xce\xc1\x8e\x82\x30\x18\x04\xe0\x7b\x9f\x62\x6e\x40\xb2\x07\xf6\xcc\xa9\xbb\xfb\xaf\x21\xd6\x62\x4a\x31\x72\x22\xb5\xa0\x34\x21\x45\xa1\xf8\xfc\x06\x13\xe3\xc5\xeb\xe4\x9b\xc9\xfc\x2a\xe2\x9a\xa0\xf9\x8f\x20\xd8\xd1\x07\x63\x43\x63\xc7\xb6\x6b\xec\xe8\xcf\xee\x82\x98\x01\x58\xbc\xbb\x2d\xcf\x68\x0e\x93\x71\x3e\xe0\x6e\x26\xdb\x9b\x29\xfe\x4e\x20\x0b\x0d\x59\x09\x81\xbd\xca\x77\x5c\xd5\xd8\x52\x8d\x3f\xfa\xe7\x95\xd0\x88\x8e\xd1\x17\x03\x06\x33\x87\xe6\xba\x9c\x06\x37\xf7\x5d\x8b\x5c\x6a\xda\x90\x7a\x57\x5f\x3c\x65\x49\xc6\x58\x2e\x4b\x52\x7a\x55\xc5\xc7\x4f\x07\x2e\x2a\x2a\x11\xaf\xe3\x48\x93\x8c\x3d\x02\x00\x00\xff\xff\xdc\x7c\x0c\xd3\xc6\x00\x00\x00")
|
||||
|
||||
func _1559627659_add_contact_codeUpSqlBytes() ([]byte, error) {
|
||||
return bindataRead(
|
||||
__1559627659_add_contact_codeUpSql,
|
||||
"1559627659_add_contact_code.up.sql",
|
||||
)
|
||||
}
|
||||
|
||||
func _1559627659_add_contact_codeUpSql() (*asset, error) {
|
||||
bytes, err := _1559627659_add_contact_codeUpSqlBytes()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
info := bindataFileInfo{name: "1559627659_add_contact_code.up.sql", size: 198, mode: os.FileMode(0644), modTime: time.Unix(1704739012, 0)}
|
||||
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x16, 0xf6, 0xc2, 0x62, 0x9c, 0xd2, 0xc9, 0x1e, 0xd8, 0xea, 0xaa, 0xea, 0x95, 0x8f, 0x89, 0x6a, 0x85, 0x5d, 0x9d, 0x99, 0x78, 0x3c, 0x90, 0x66, 0x99, 0x3e, 0x4b, 0x19, 0x62, 0xfb, 0x31, 0x4d}}
|
||||
return a, nil
|
||||
}
|
||||
|
||||
var __1561368210_add_installation_metadataDownSql = []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x72\x09\xf2\x0f\x50\x08\x71\x74\xf2\x71\x55\xc8\xcc\x2b\x2e\x49\xcc\xc9\x49\x2c\xc9\xcc\xcf\x2b\x8e\xcf\x4d\x2d\x49\x4c\x49\x2c\x49\xb4\xe6\x02\x04\x00\x00\xff\xff\x03\x72\x7f\x08\x23\x00\x00\x00")
|
||||
|
||||
func _1561368210_add_installation_metadataDownSqlBytes() ([]byte, error) {
|
||||
return bindataRead(
|
||||
__1561368210_add_installation_metadataDownSql,
|
||||
"1561368210_add_installation_metadata.down.sql",
|
||||
)
|
||||
}
|
||||
|
||||
func _1561368210_add_installation_metadataDownSql() (*asset, error) {
|
||||
bytes, err := _1561368210_add_installation_metadataDownSqlBytes()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
info := bindataFileInfo{name: "1561368210_add_installation_metadata.down.sql", size: 35, mode: os.FileMode(0644), modTime: time.Unix(1704739012, 0)}
|
||||
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xa8, 0xde, 0x3f, 0xd2, 0x4a, 0x50, 0x98, 0x56, 0xe3, 0xc0, 0xcd, 0x9d, 0xb0, 0x34, 0x3b, 0xe5, 0x62, 0x18, 0xb5, 0x20, 0xc9, 0x3e, 0xdc, 0x6a, 0x40, 0x36, 0x66, 0xea, 0x51, 0x8c, 0x71, 0xf5}}
|
||||
return a, nil
|
||||
}
|
||||
|
||||
var __1561368210_add_installation_metadataUpSql = []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x7c\xce\xc1\x8a\x83\x30\x10\xc6\xf1\xbb\x4f\xf1\xdd\x54\xf0\x0d\xf6\x14\xb3\x23\x08\x21\xd9\x95\x04\x7a\x93\x60\x52\x08\xd5\x58\xe8\x50\xf0\xed\x8b\x87\x42\xed\xc1\xeb\xcc\xef\x83\xbf\x1c\x48\x58\x82\x15\xad\x22\xa4\xfc\x60\x3f\xcf\x9e\xd3\x9a\xc7\x25\xb2\x0f\x9e\x3d\x50\x15\x40\x0a\x31\x73\xe2\x0d\xad\x32\x2d\xb4\xb1\xd0\x4e\xa9\x66\xff\x7c\x8e\x52\x80\xa5\x8b\x3d\x80\xec\x97\x78\xbc\xe2\x97\x3a\xe1\x94\x45\x59\xee\x20\xc4\x67\x9a\xe2\xc8\xdb\xfd\xdc\x5d\xa7\x65\xe4\xf5\x16\xf3\xa9\x72\xba\xff\x77\x54\xbd\x83\x9b\xef\xc0\x1a\x46\x43\x1a\xdd\xa9\x5e\x5a\x0c\xf4\xa7\x84\xa4\xa2\xfe\x29\x5e\x01\x00\x00\xff\xff\x5d\x6f\xe6\xd3\x0b\x01\x00\x00")
|
||||
|
||||
func _1561368210_add_installation_metadataUpSqlBytes() ([]byte, error) {
|
||||
return bindataRead(
|
||||
__1561368210_add_installation_metadataUpSql,
|
||||
"1561368210_add_installation_metadata.up.sql",
|
||||
)
|
||||
}
|
||||
|
||||
func _1561368210_add_installation_metadataUpSql() (*asset, error) {
|
||||
bytes, err := _1561368210_add_installation_metadataUpSqlBytes()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
info := bindataFileInfo{name: "1561368210_add_installation_metadata.up.sql", size: 267, mode: os.FileMode(0644), modTime: time.Unix(1704739012, 0)}
|
||||
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xb4, 0x71, 0x8f, 0x29, 0xb1, 0xaa, 0xd6, 0xd1, 0x8c, 0x17, 0xef, 0x6c, 0xd5, 0x80, 0xb8, 0x2c, 0xc3, 0xfe, 0xec, 0x24, 0x4d, 0xc8, 0x25, 0xd3, 0xb4, 0xcd, 0xa9, 0xac, 0x63, 0x61, 0xb2, 0x9c}}
|
||||
return a, nil
|
||||
}
|
||||
|
||||
var __1632236298_add_communitiesDownSql = []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x72\x09\xf2\x0f\x50\xf0\xf4\x73\x71\x8d\x50\xc8\x4c\xa9\x88\xcf\x48\x2c\xce\x88\x2f\x4a\x2c\x49\xce\x48\x2d\x89\x4f\xcd\x4b\xb6\xe6\x22\xa0\x20\x3e\x39\x31\x39\x23\xd5\x9a\x0b\xa2\x2e\xc4\xd1\xc9\xc7\x55\x01\x5d\x4d\x51\x65\x41\x49\x66\x7e\x1e\x4c\x29\x61\x95\xd6\x5c\x80\x00\x00\x00\xff\xff\xa4\x97\x4f\xad\x97\x00\x00\x00")
|
||||
|
||||
func _1632236298_add_communitiesDownSqlBytes() ([]byte, error) {
|
||||
return bindataRead(
|
||||
__1632236298_add_communitiesDownSql,
|
||||
"1632236298_add_communities.down.sql",
|
||||
)
|
||||
}
|
||||
|
||||
func _1632236298_add_communitiesDownSql() (*asset, error) {
|
||||
bytes, err := _1632236298_add_communitiesDownSqlBytes()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
info := bindataFileInfo{name: "1632236298_add_communities.down.sql", size: 151, mode: os.FileMode(0644), modTime: time.Unix(1704739012, 0)}
|
||||
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x26, 0xe5, 0x47, 0xd1, 0xe5, 0xec, 0x5b, 0x3e, 0xdc, 0x22, 0xf4, 0x27, 0xee, 0x70, 0xf3, 0x9, 0x4f, 0xd2, 0x9f, 0x92, 0xf, 0x5a, 0x18, 0x11, 0xb7, 0x40, 0xab, 0xf1, 0x98, 0x72, 0xd6, 0x60}}
|
||||
return a, nil
|
||||
}
|
||||
|
||||
var __1632236298_add_communitiesUpSql = []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x94\x90\xdf\x4a\xc3\x30\x14\xc6\xef\xf3\x14\xe7\x72\x83\xbe\x81\x57\xed\x3c\x1b\xc1\x7a\xa2\x31\x05\x77\x15\x4a\x16\x4c\x10\xd3\xd9\x46\xb0\x6f\x2f\xc1\x0c\x71\x71\xd2\xdd\x7e\x9c\xf3\xfd\xf9\x6d\x24\xd6\x0a\x41\xd5\x4d\x8b\xe0\xfa\xc9\xe9\xb1\x8f\xc6\xd9\xa8\x6d\x30\xe3\x7c\x8c\x7e\x08\xb0\x62\x00\x2f\xe3\xf0\x71\xd4\xfe\x00\x4d\x2b\x1a\x20\xa1\x80\xba\xb6\xad\x18\xc0\xab\x9d\x93\xce\x49\x9d\xcb\xe5\xed\x83\xe4\xf7\xb5\xdc\xc3\x1d\xee\x57\x27\xc7\x2a\x3b\xac\xd9\xfa\x86\xb1\x5c\xa8\x23\xfe\xd8\x21\x70\xba\xc5\x67\xf0\x87\x4f\x7d\xde\x0d\x04\x5d\xea\x5b\x3a\xff\xf8\xfe\x3b\x54\x9b\xde\x38\xbb\x6c\xae\x0f\xf1\x97\x3c\xd9\x77\x1d\x86\x44\x01\x77\x28\x93\x92\x42\xca\xf7\x69\x7e\x4b\x89\xfa\x84\x27\x69\x5b\x21\x91\xef\xe8\x6f\x2a\x20\x71\x8b\x12\x69\x83\x4f\xcb\x07\x5f\x85\x32\xcf\xbe\x0c\xf4\xfb\xa0\x48\xa9\xf2\xe8\x94\xf5\x15\x00\x00\xff\xff\x61\x30\xb4\xa0\x48\x02\x00\x00")
|
||||
|
||||
func _1632236298_add_communitiesUpSqlBytes() ([]byte, error) {
|
||||
return bindataRead(
|
||||
__1632236298_add_communitiesUpSql,
|
||||
"1632236298_add_communities.up.sql",
|
||||
)
|
||||
}
|
||||
|
||||
func _1632236298_add_communitiesUpSql() (*asset, error) {
|
||||
bytes, err := _1632236298_add_communitiesUpSqlBytes()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
info := bindataFileInfo{name: "1632236298_add_communities.up.sql", size: 584, mode: os.FileMode(0644), modTime: time.Unix(1704739012, 0)}
|
||||
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0x8f, 0xe0, 0x1, 0x6e, 0x84, 0xc, 0x35, 0xe4, 0x5a, 0xf, 0xbe, 0xcb, 0xf7, 0xd2, 0xa8, 0x25, 0xf5, 0xdb, 0x7, 0xcb, 0xa3, 0xe6, 0xf4, 0xc4, 0x1b, 0xa5, 0xec, 0x32, 0x1e, 0x1e, 0x48, 0x60}}
|
||||
return a, nil
|
||||
}
|
||||
|
||||
var __1636536507_add_index_bundlesUpSql = []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x72\x0e\x72\x75\x0c\x71\x55\xf0\xf4\x73\x71\x8d\x50\x48\xcd\x4b\x2e\xaa\x2c\x28\xc9\xcc\xcf\x8b\x4f\x2a\xcd\x4b\xc9\x49\x2d\x8e\x4f\xad\x28\xc8\x2c\x4a\x4d\x89\xcf\x4c\x49\xcd\x2b\xc9\x2c\xa9\x8c\xcf\xcc\x2b\x2e\x49\xcc\xc9\x49\x04\xab\xca\x4c\x89\x2f\x4b\x2d\x2a\xce\xcc\xcf\x53\xc8\xcf\x53\x80\xea\x51\xd0\x80\x6a\xd2\x51\x80\xe9\xd2\x51\x40\xd3\xa6\xa3\x00\xd5\xa7\x69\xcd\x45\xa9\x13\x50\xac\x26\xdd\xc6\xa2\xc4\x92\xe4\x8c\xd4\x92\xf8\xcc\xbc\xb4\xfc\xf8\x32\x23\xbc\xd6\xa0\xa9\xc5\x63\x9d\xa6\x35\x17\x20\x00\x00\xff\xff\xd4\xde\x07\x5c\x5b\x01\x00\x00")
|
||||
|
||||
func _1636536507_add_index_bundlesUpSqlBytes() ([]byte, error) {
|
||||
return bindataRead(
|
||||
__1636536507_add_index_bundlesUpSql,
|
||||
"1636536507_add_index_bundles.up.sql",
|
||||
)
|
||||
}
|
||||
|
||||
func _1636536507_add_index_bundlesUpSql() (*asset, error) {
|
||||
bytes, err := _1636536507_add_index_bundlesUpSqlBytes()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
info := bindataFileInfo{name: "1636536507_add_index_bundles.up.sql", size: 347, mode: os.FileMode(0644), modTime: time.Unix(1704739012, 0)}
|
||||
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xf1, 0xb9, 0x3c, 0x16, 0xfc, 0xfb, 0xb2, 0xb4, 0x3b, 0xfe, 0xdc, 0xf5, 0x9c, 0x42, 0xa0, 0xa0, 0xd4, 0xd, 0x5b, 0x97, 0x10, 0x80, 0x95, 0xe, 0x13, 0xc1, 0x18, 0x8, 0xee, 0xf, 0x99, 0xee}}
|
||||
return a, nil
|
||||
}
|
||||
|
||||
var _docGo = []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x84\x8f\xbb\x6e\xc3\x30\x0c\x45\x77\x7f\xc5\x45\x96\x2c\xb5\xb4\x74\xea\xd6\xb1\x7b\x7f\x80\x91\x68\x89\x88\x1e\xae\x48\xe7\xf1\xf7\x85\xd3\x02\xcd\xd6\xf5\x00\xe7\xf0\xd2\x7b\x7c\x66\x51\x2c\x52\x18\xa2\x68\x1c\x58\x95\xc6\x1d\x27\x0e\xb4\x29\xe3\x90\xc4\xf2\x76\x72\xa1\x57\xaf\x46\xb6\xe9\x2c\xd5\x57\x49\x83\x8c\xfd\xe5\xf5\x30\x79\x8f\x40\xed\x68\xc8\xd4\x62\xe1\x47\x4b\xa1\x46\xc3\xa4\x25\x5c\xc5\x32\x08\xeb\xe0\x45\x6e\x0e\xef\x86\xc2\xa4\x06\xcb\x64\x47\x85\x65\x46\x20\xe5\x3d\xb3\xf4\x81\xd4\xe7\x93\xb4\x48\x46\x6e\x47\x1f\xcb\x13\xd9\x17\x06\x2a\x85\x23\x96\xd1\xeb\xc3\x55\xaa\x8c\x28\x83\x83\xf5\x71\x7f\x01\xa9\xb2\xa1\x51\x65\xdd\xfd\x4c\x17\x46\xeb\xbf\xe7\x41\x2d\xfe\xff\x11\xae\x7d\x9c\x15\xa4\xe0\xdb\xca\xc1\x38\xba\x69\x5a\x29\x9c\x29\x31\xf4\xab\x88\xf1\x34\x79\x9f\xfa\x5b\xe2\xc6\xbb\xf5\xbc\x71\x5e\xcf\x09\x3f\x35\xe9\x4d\x31\x77\x38\xe7\xff\x80\x4b\x1d\x6e\xfa\x0e\x00\x00\xff\xff\x9d\x60\x3d\x88\x79\x01\x00\x00")
|
||||
|
||||
func docGoBytes() ([]byte, error) {
|
||||
return bindataRead(
|
||||
_docGo,
|
||||
"doc.go",
|
||||
)
|
||||
}
|
||||
|
||||
func docGo() (*asset, error) {
|
||||
bytes, err := docGoBytes()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
info := bindataFileInfo{name: "doc.go", size: 377, mode: os.FileMode(0644), modTime: time.Unix(1704739012, 0)}
|
||||
a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xef, 0xaf, 0xdf, 0xcf, 0x65, 0xae, 0x19, 0xfc, 0x9d, 0x29, 0xc1, 0x91, 0xaf, 0xb5, 0xd5, 0xb1, 0x56, 0xf3, 0xee, 0xa8, 0xba, 0x13, 0x65, 0xdb, 0xab, 0xcf, 0x4e, 0xac, 0x92, 0xe9, 0x60, 0xf1}}
|
||||
return a, nil
|
||||
}
|
||||
|
||||
// Asset loads and returns the asset for the given name.
|
||||
// It returns an error if the asset could not be found or
|
||||
// could not be loaded.
|
||||
func Asset(name string) ([]byte, error) {
|
||||
canonicalName := strings.Replace(name, "\\", "/", -1)
|
||||
if f, ok := _bindata[canonicalName]; ok {
|
||||
a, err := f()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("Asset %s can't read by error: %v", name, err)
|
||||
}
|
||||
return a.bytes, nil
|
||||
}
|
||||
return nil, fmt.Errorf("Asset %s not found", name)
|
||||
}
|
||||
|
||||
// AssetString returns the asset contents as a string (instead of a []byte).
|
||||
func AssetString(name string) (string, error) {
|
||||
data, err := Asset(name)
|
||||
return string(data), err
|
||||
}
|
||||
|
||||
// MustAsset is like Asset but panics when Asset would return an error.
|
||||
// It simplifies safe initialization of global variables.
|
||||
func MustAsset(name string) []byte {
|
||||
a, err := Asset(name)
|
||||
if err != nil {
|
||||
panic("asset: Asset(" + name + "): " + err.Error())
|
||||
}
|
||||
|
||||
return a
|
||||
}
|
||||
|
||||
// MustAssetString is like AssetString but panics when Asset would return an
|
||||
// error. It simplifies safe initialization of global variables.
|
||||
func MustAssetString(name string) string {
|
||||
return string(MustAsset(name))
|
||||
}
|
||||
|
||||
// AssetInfo loads and returns the asset info for the given name.
|
||||
// It returns an error if the asset could not be found or
|
||||
// could not be loaded.
|
||||
func AssetInfo(name string) (os.FileInfo, error) {
|
||||
canonicalName := strings.Replace(name, "\\", "/", -1)
|
||||
if f, ok := _bindata[canonicalName]; ok {
|
||||
a, err := f()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("AssetInfo %s can't read by error: %v", name, err)
|
||||
}
|
||||
return a.info, nil
|
||||
}
|
||||
return nil, fmt.Errorf("AssetInfo %s not found", name)
|
||||
}
|
||||
|
||||
// AssetDigest returns the digest of the file with the given name. It returns an
|
||||
// error if the asset could not be found or the digest could not be loaded.
|
||||
func AssetDigest(name string) ([sha256.Size]byte, error) {
|
||||
canonicalName := strings.Replace(name, "\\", "/", -1)
|
||||
if f, ok := _bindata[canonicalName]; ok {
|
||||
a, err := f()
|
||||
if err != nil {
|
||||
return [sha256.Size]byte{}, fmt.Errorf("AssetDigest %s can't read by error: %v", name, err)
|
||||
}
|
||||
return a.digest, nil
|
||||
}
|
||||
return [sha256.Size]byte{}, fmt.Errorf("AssetDigest %s not found", name)
|
||||
}
|
||||
|
||||
// Digests returns a map of all known files and their checksums.
|
||||
func Digests() (map[string][sha256.Size]byte, error) {
|
||||
mp := make(map[string][sha256.Size]byte, len(_bindata))
|
||||
for name := range _bindata {
|
||||
a, err := _bindata[name]()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
mp[name] = a.digest
|
||||
}
|
||||
return mp, nil
|
||||
}
|
||||
|
||||
// AssetNames returns the names of the assets.
|
||||
func AssetNames() []string {
|
||||
names := make([]string, 0, len(_bindata))
|
||||
for name := range _bindata {
|
||||
names = append(names, name)
|
||||
}
|
||||
return names
|
||||
}
|
||||
|
||||
// _bindata is a table, holding each asset generator, mapped to its name.
|
||||
var _bindata = map[string]func() (*asset, error){
|
||||
"1536754952_initial_schema.down.sql": _1536754952_initial_schemaDownSql,
|
||||
"1536754952_initial_schema.up.sql": _1536754952_initial_schemaUpSql,
|
||||
"1539249977_update_ratchet_info.down.sql": _1539249977_update_ratchet_infoDownSql,
|
||||
"1539249977_update_ratchet_info.up.sql": _1539249977_update_ratchet_infoUpSql,
|
||||
"1540715431_add_version.down.sql": _1540715431_add_versionDownSql,
|
||||
"1540715431_add_version.up.sql": _1540715431_add_versionUpSql,
|
||||
"1541164797_add_installations.down.sql": _1541164797_add_installationsDownSql,
|
||||
"1541164797_add_installations.up.sql": _1541164797_add_installationsUpSql,
|
||||
"1558084410_add_secret.down.sql": _1558084410_add_secretDownSql,
|
||||
"1558084410_add_secret.up.sql": _1558084410_add_secretUpSql,
|
||||
"1558588866_add_version.down.sql": _1558588866_add_versionDownSql,
|
||||
"1558588866_add_version.up.sql": _1558588866_add_versionUpSql,
|
||||
"1559627659_add_contact_code.down.sql": _1559627659_add_contact_codeDownSql,
|
||||
"1559627659_add_contact_code.up.sql": _1559627659_add_contact_codeUpSql,
|
||||
"1561368210_add_installation_metadata.down.sql": _1561368210_add_installation_metadataDownSql,
|
||||
"1561368210_add_installation_metadata.up.sql": _1561368210_add_installation_metadataUpSql,
|
||||
"1632236298_add_communities.down.sql": _1632236298_add_communitiesDownSql,
|
||||
"1632236298_add_communities.up.sql": _1632236298_add_communitiesUpSql,
|
||||
"1636536507_add_index_bundles.up.sql": _1636536507_add_index_bundlesUpSql,
|
||||
"doc.go": docGo,
|
||||
}
|
||||
|
||||
// AssetDebug is true if the assets were built with the debug flag enabled.
|
||||
const AssetDebug = false
|
||||
|
||||
// AssetDir returns the file names below a certain
|
||||
// directory embedded in the file by go-bindata.
|
||||
// For example if you run go-bindata on data/... and data contains the
|
||||
// following hierarchy:
|
||||
//
|
||||
// data/
|
||||
// foo.txt
|
||||
// img/
|
||||
// a.png
|
||||
// b.png
|
||||
//
|
||||
// then AssetDir("data") would return []string{"foo.txt", "img"},
|
||||
// AssetDir("data/img") would return []string{"a.png", "b.png"},
|
||||
// AssetDir("foo.txt") and AssetDir("notexist") would return an error, and
|
||||
// AssetDir("") will return []string{"data"}.
|
||||
func AssetDir(name string) ([]string, error) {
|
||||
node := _bintree
|
||||
if len(name) != 0 {
|
||||
canonicalName := strings.Replace(name, "\\", "/", -1)
|
||||
pathList := strings.Split(canonicalName, "/")
|
||||
for _, p := range pathList {
|
||||
node = node.Children[p]
|
||||
if node == nil {
|
||||
return nil, fmt.Errorf("Asset %s not found", name)
|
||||
}
|
||||
}
|
||||
}
|
||||
if node.Func != nil {
|
||||
return nil, fmt.Errorf("Asset %s not found", name)
|
||||
}
|
||||
rv := make([]string, 0, len(node.Children))
|
||||
for childName := range node.Children {
|
||||
rv = append(rv, childName)
|
||||
}
|
||||
return rv, nil
|
||||
}
|
||||
|
||||
type bintree struct {
|
||||
Func func() (*asset, error)
|
||||
Children map[string]*bintree
|
||||
}
|
||||
|
||||
var _bintree = &bintree{nil, map[string]*bintree{
|
||||
"1536754952_initial_schema.down.sql": {_1536754952_initial_schemaDownSql, map[string]*bintree{}},
|
||||
"1536754952_initial_schema.up.sql": {_1536754952_initial_schemaUpSql, map[string]*bintree{}},
|
||||
"1539249977_update_ratchet_info.down.sql": {_1539249977_update_ratchet_infoDownSql, map[string]*bintree{}},
|
||||
"1539249977_update_ratchet_info.up.sql": {_1539249977_update_ratchet_infoUpSql, map[string]*bintree{}},
|
||||
"1540715431_add_version.down.sql": {_1540715431_add_versionDownSql, map[string]*bintree{}},
|
||||
"1540715431_add_version.up.sql": {_1540715431_add_versionUpSql, map[string]*bintree{}},
|
||||
"1541164797_add_installations.down.sql": {_1541164797_add_installationsDownSql, map[string]*bintree{}},
|
||||
"1541164797_add_installations.up.sql": {_1541164797_add_installationsUpSql, map[string]*bintree{}},
|
||||
"1558084410_add_secret.down.sql": {_1558084410_add_secretDownSql, map[string]*bintree{}},
|
||||
"1558084410_add_secret.up.sql": {_1558084410_add_secretUpSql, map[string]*bintree{}},
|
||||
"1558588866_add_version.down.sql": {_1558588866_add_versionDownSql, map[string]*bintree{}},
|
||||
"1558588866_add_version.up.sql": {_1558588866_add_versionUpSql, map[string]*bintree{}},
|
||||
"1559627659_add_contact_code.down.sql": {_1559627659_add_contact_codeDownSql, map[string]*bintree{}},
|
||||
"1559627659_add_contact_code.up.sql": {_1559627659_add_contact_codeUpSql, map[string]*bintree{}},
|
||||
"1561368210_add_installation_metadata.down.sql": {_1561368210_add_installation_metadataDownSql, map[string]*bintree{}},
|
||||
"1561368210_add_installation_metadata.up.sql": {_1561368210_add_installation_metadataUpSql, map[string]*bintree{}},
|
||||
"1632236298_add_communities.down.sql": {_1632236298_add_communitiesDownSql, map[string]*bintree{}},
|
||||
"1632236298_add_communities.up.sql": {_1632236298_add_communitiesUpSql, map[string]*bintree{}},
|
||||
"1636536507_add_index_bundles.up.sql": {_1636536507_add_index_bundlesUpSql, map[string]*bintree{}},
|
||||
"doc.go": {docGo, map[string]*bintree{}},
|
||||
}}
|
||||
|
||||
// RestoreAsset restores an asset under the given directory.
|
||||
func RestoreAsset(dir, name string) error {
|
||||
data, err := Asset(name)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
info, err := AssetInfo(name)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
err = os.MkdirAll(_filePath(dir, filepath.Dir(name)), os.FileMode(0755))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
err = os.WriteFile(_filePath(dir, name), data, info.Mode())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return os.Chtimes(_filePath(dir, name), info.ModTime(), info.ModTime())
|
||||
}
|
||||
|
||||
// RestoreAssets restores an asset under the given directory recursively.
|
||||
func RestoreAssets(dir, name string) error {
|
||||
children, err := AssetDir(name)
|
||||
// File
|
||||
if err != nil {
|
||||
return RestoreAsset(dir, name)
|
||||
}
|
||||
// Dir
|
||||
for _, child := range children {
|
||||
err = RestoreAssets(dir, filepath.Join(name, child))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func _filePath(dir, name string) string {
|
||||
canonicalName := strings.Replace(name, "\\", "/", -1)
|
||||
return filepath.Join(append([]string{dir}, strings.Split(canonicalName, "/")...)...)
|
||||
}
|
||||
125
vendor/github.com/status-im/status-go/protocol/encryption/multidevice/multidevice.go
generated
vendored
Normal file
125
vendor/github.com/status-im/status-go/protocol/encryption/multidevice/multidevice.go
generated
vendored
Normal file
@@ -0,0 +1,125 @@
|
||||
package multidevice
|
||||
|
||||
import (
|
||||
"crypto/ecdsa"
|
||||
"database/sql"
|
||||
|
||||
"github.com/status-im/status-go/eth-node/crypto"
|
||||
)
|
||||
|
||||
type InstallationMetadata struct {
|
||||
// The name of the device
|
||||
Name string `json:"name"`
|
||||
// The type of device
|
||||
DeviceType string `json:"deviceType"`
|
||||
// The FCMToken for mobile devices
|
||||
FCMToken string `json:"fcmToken"`
|
||||
}
|
||||
|
||||
type Installation struct {
|
||||
// Identity is the string identity of the owner
|
||||
Identity string `json:"identity"`
|
||||
// The installation-id of the device
|
||||
ID string `json:"id"`
|
||||
// The last known protocol version of the device
|
||||
Version uint32 `json:"version"`
|
||||
// Enabled is whether the installation is enabled
|
||||
Enabled bool `json:"enabled"`
|
||||
// Timestamp is the last time we saw this device
|
||||
Timestamp int64 `json:"timestamp"`
|
||||
// InstallationMetadata
|
||||
InstallationMetadata *InstallationMetadata `json:"metadata"`
|
||||
}
|
||||
|
||||
type Config struct {
|
||||
MaxInstallations int
|
||||
ProtocolVersion uint32
|
||||
InstallationID string
|
||||
}
|
||||
|
||||
type Multidevice struct {
|
||||
persistence *sqlitePersistence
|
||||
config *Config
|
||||
}
|
||||
|
||||
func New(db *sql.DB, config *Config) *Multidevice {
|
||||
return &Multidevice{
|
||||
config: config,
|
||||
persistence: newSQLitePersistence(db),
|
||||
}
|
||||
}
|
||||
|
||||
func (s *Multidevice) InstallationID() string {
|
||||
return s.config.InstallationID
|
||||
}
|
||||
|
||||
func (s *Multidevice) GetActiveInstallations(identity *ecdsa.PublicKey) ([]*Installation, error) {
|
||||
identityC := crypto.CompressPubkey(identity)
|
||||
return s.persistence.GetActiveInstallations(s.config.MaxInstallations, identityC)
|
||||
}
|
||||
|
||||
func (s *Multidevice) GetOurActiveInstallations(identity *ecdsa.PublicKey) ([]*Installation, error) {
|
||||
identityC := crypto.CompressPubkey(identity)
|
||||
installations, err := s.persistence.GetActiveInstallations(s.config.MaxInstallations-1, identityC)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
installations = append(installations, &Installation{
|
||||
ID: s.config.InstallationID,
|
||||
Version: s.config.ProtocolVersion,
|
||||
})
|
||||
|
||||
return installations, nil
|
||||
}
|
||||
|
||||
func (s *Multidevice) GetOurInstallations(identity *ecdsa.PublicKey) ([]*Installation, error) {
|
||||
var found bool
|
||||
identityC := crypto.CompressPubkey(identity)
|
||||
installations, err := s.persistence.GetInstallations(identityC)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
for _, installation := range installations {
|
||||
if installation.ID == s.config.InstallationID {
|
||||
found = true
|
||||
installation.Enabled = true
|
||||
installation.Version = s.config.ProtocolVersion
|
||||
}
|
||||
|
||||
}
|
||||
if !found {
|
||||
installations = append(installations, &Installation{
|
||||
ID: s.config.InstallationID,
|
||||
Enabled: true,
|
||||
Version: s.config.ProtocolVersion,
|
||||
})
|
||||
}
|
||||
|
||||
return installations, nil
|
||||
}
|
||||
|
||||
func (s *Multidevice) AddInstallations(identity []byte, timestamp int64, installations []*Installation, defaultEnabled bool) ([]*Installation, error) {
|
||||
return s.persistence.AddInstallations(identity, timestamp, installations, defaultEnabled)
|
||||
}
|
||||
|
||||
func (s *Multidevice) SetInstallationMetadata(identity *ecdsa.PublicKey, installationID string, metadata *InstallationMetadata) error {
|
||||
identityC := crypto.CompressPubkey(identity)
|
||||
return s.persistence.SetInstallationMetadata(identityC, installationID, metadata)
|
||||
}
|
||||
|
||||
func (s *Multidevice) SetInstallationName(identity *ecdsa.PublicKey, installationID string, name string) error {
|
||||
identityC := crypto.CompressPubkey(identity)
|
||||
return s.persistence.SetInstallationName(identityC, installationID, name)
|
||||
}
|
||||
|
||||
func (s *Multidevice) EnableInstallation(identity *ecdsa.PublicKey, installationID string) error {
|
||||
identityC := crypto.CompressPubkey(identity)
|
||||
return s.persistence.EnableInstallation(identityC, installationID)
|
||||
}
|
||||
|
||||
func (s *Multidevice) DisableInstallation(myIdentityKey *ecdsa.PublicKey, installationID string) error {
|
||||
myIdentityKeyC := crypto.CompressPubkey(myIdentityKey)
|
||||
return s.persistence.DisableInstallation(myIdentityKeyC, installationID)
|
||||
}
|
||||
278
vendor/github.com/status-im/status-go/protocol/encryption/multidevice/persistence.go
generated
vendored
Normal file
278
vendor/github.com/status-im/status-go/protocol/encryption/multidevice/persistence.go
generated
vendored
Normal file
@@ -0,0 +1,278 @@
|
||||
package multidevice
|
||||
|
||||
import "database/sql"
|
||||
|
||||
type sqlitePersistence struct {
|
||||
db *sql.DB
|
||||
}
|
||||
|
||||
func newSQLitePersistence(db *sql.DB) *sqlitePersistence {
|
||||
return &sqlitePersistence{db: db}
|
||||
}
|
||||
|
||||
// GetActiveInstallations returns the active installations for a given identity
|
||||
func (s *sqlitePersistence) GetActiveInstallations(maxInstallations int, identity []byte) ([]*Installation, error) {
|
||||
stmt, err := s.db.Prepare(`SELECT installation_id, version
|
||||
FROM installations
|
||||
WHERE enabled = 1 AND identity = ?
|
||||
ORDER BY timestamp DESC
|
||||
LIMIT ?`)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var installations []*Installation
|
||||
rows, err := stmt.Query(identity, maxInstallations)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
for rows.Next() {
|
||||
var (
|
||||
installationID string
|
||||
version uint32
|
||||
)
|
||||
err = rows.Scan(
|
||||
&installationID,
|
||||
&version,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
installations = append(installations, &Installation{
|
||||
ID: installationID,
|
||||
Version: version,
|
||||
Enabled: true,
|
||||
})
|
||||
|
||||
}
|
||||
|
||||
return installations, nil
|
||||
|
||||
}
|
||||
|
||||
// GetInstallations returns all the installations for a given identity
|
||||
// we both return the installations & the metadata
|
||||
// metadata is currently stored in a separate table, as in some cases we
|
||||
// might have metadata for a device, but no other information on the device
|
||||
func (s *sqlitePersistence) GetInstallations(identity []byte) ([]*Installation, error) {
|
||||
installationMap := make(map[string]*Installation)
|
||||
var installations []*Installation
|
||||
|
||||
// We query both tables as sqlite does not support full outer joins
|
||||
installationsStmt, err := s.db.Prepare(`SELECT installation_id, version, enabled, timestamp FROM installations WHERE identity = ?`)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer installationsStmt.Close()
|
||||
|
||||
installationRows, err := installationsStmt.Query(identity)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
for installationRows.Next() {
|
||||
var installation Installation
|
||||
err = installationRows.Scan(
|
||||
&installation.ID,
|
||||
&installation.Version,
|
||||
&installation.Enabled,
|
||||
&installation.Timestamp,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
// We initialized to empty in this case as we want to
|
||||
// return metadata as well in this endpoint, but not in others
|
||||
installation.InstallationMetadata = &InstallationMetadata{}
|
||||
installationMap[installation.ID] = &installation
|
||||
}
|
||||
|
||||
metadataStmt, err := s.db.Prepare(`SELECT installation_id, name, device_type, fcm_token FROM installation_metadata WHERE identity = ?`)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer metadataStmt.Close()
|
||||
|
||||
metadataRows, err := metadataStmt.Query(identity)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
for metadataRows.Next() {
|
||||
var (
|
||||
installationID string
|
||||
name sql.NullString
|
||||
deviceType sql.NullString
|
||||
fcmToken sql.NullString
|
||||
installation *Installation
|
||||
)
|
||||
err = metadataRows.Scan(
|
||||
&installationID,
|
||||
&name,
|
||||
&deviceType,
|
||||
&fcmToken,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if _, ok := installationMap[installationID]; ok {
|
||||
installation = installationMap[installationID]
|
||||
} else {
|
||||
installation = &Installation{ID: installationID}
|
||||
}
|
||||
installation.InstallationMetadata = &InstallationMetadata{
|
||||
Name: name.String,
|
||||
DeviceType: deviceType.String,
|
||||
FCMToken: fcmToken.String,
|
||||
}
|
||||
installationMap[installationID] = installation
|
||||
}
|
||||
|
||||
for _, installation := range installationMap {
|
||||
installations = append(installations, installation)
|
||||
}
|
||||
|
||||
return installations, nil
|
||||
}
|
||||
|
||||
// AddInstallations adds the installations for a given identity, maintaining the enabled flag
|
||||
func (s *sqlitePersistence) AddInstallations(identity []byte, timestamp int64, installations []*Installation, defaultEnabled bool) ([]*Installation, error) {
|
||||
tx, err := s.db.Begin()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var insertedInstallations []*Installation
|
||||
|
||||
for _, installation := range installations {
|
||||
stmt, err := tx.Prepare(`SELECT enabled, version
|
||||
FROM installations
|
||||
WHERE identity = ? AND installation_id = ?
|
||||
LIMIT 1`)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer stmt.Close()
|
||||
|
||||
var oldEnabled bool
|
||||
// We don't override version once we saw one
|
||||
var oldVersion uint32
|
||||
latestVersion := installation.Version
|
||||
|
||||
err = stmt.QueryRow(identity, installation.ID).Scan(&oldEnabled, &oldVersion)
|
||||
if err != nil && err != sql.ErrNoRows {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if err == sql.ErrNoRows {
|
||||
stmt, err = tx.Prepare(`INSERT INTO installations(identity, installation_id, timestamp, enabled, version)
|
||||
VALUES (?, ?, ?, ?, ?)`)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer stmt.Close()
|
||||
|
||||
_, err = stmt.Exec(
|
||||
identity,
|
||||
installation.ID,
|
||||
timestamp,
|
||||
defaultEnabled,
|
||||
latestVersion,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
insertedInstallations = append(insertedInstallations, installation)
|
||||
} else {
|
||||
// We update timestamp if present without changing enabled, only if this is a new bundle
|
||||
// and we set the version to the latest we ever saw
|
||||
if oldVersion > installation.Version {
|
||||
latestVersion = oldVersion
|
||||
}
|
||||
|
||||
stmt, err = tx.Prepare(`UPDATE installations
|
||||
SET timestamp = ?, enabled = ?, version = ?
|
||||
WHERE identity = ?
|
||||
AND installation_id = ?
|
||||
AND timestamp < ?`)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer stmt.Close()
|
||||
|
||||
_, err = stmt.Exec(
|
||||
timestamp,
|
||||
oldEnabled,
|
||||
latestVersion,
|
||||
identity,
|
||||
installation.ID,
|
||||
timestamp,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
if err := tx.Commit(); err != nil {
|
||||
_ = tx.Rollback()
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return insertedInstallations, nil
|
||||
|
||||
}
|
||||
|
||||
// EnableInstallation enables the installation
|
||||
func (s *sqlitePersistence) EnableInstallation(identity []byte, installationID string) error {
|
||||
stmt, err := s.db.Prepare(`UPDATE installations
|
||||
SET enabled = 1
|
||||
WHERE identity = ? AND installation_id = ?`)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
_, err = stmt.Exec(identity, installationID)
|
||||
return err
|
||||
|
||||
}
|
||||
|
||||
// DisableInstallation disable the installation
|
||||
func (s *sqlitePersistence) DisableInstallation(identity []byte, installationID string) error {
|
||||
stmt, err := s.db.Prepare(`UPDATE installations
|
||||
SET enabled = 0
|
||||
WHERE identity = ? AND installation_id = ?`)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
_, err = stmt.Exec(identity, installationID)
|
||||
return err
|
||||
}
|
||||
|
||||
// SetInstallationMetadata sets the metadata for a given installation
|
||||
func (s *sqlitePersistence) SetInstallationMetadata(identity []byte, installationID string, metadata *InstallationMetadata) error {
|
||||
stmt, err := s.db.Prepare(`INSERT INTO installation_metadata(name, device_type, fcm_token, identity, installation_id) VALUES(?,?,?,?,?)`)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
_, err = stmt.Exec(metadata.Name, metadata.DeviceType, metadata.FCMToken, identity, installationID)
|
||||
return err
|
||||
}
|
||||
|
||||
// SetInstallationName sets the only the name in metadata for a given installation
|
||||
func (s *sqlitePersistence) SetInstallationName(identity []byte, installationID string, name string) error {
|
||||
stmt, err := s.db.Prepare(`UPDATE installation_metadata
|
||||
SET name = ?
|
||||
WHERE identity = ? AND installation_id = ?`)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
_, err = stmt.Exec(name, identity, installationID)
|
||||
return err
|
||||
}
|
||||
1005
vendor/github.com/status-im/status-go/protocol/encryption/persistence.go
generated
vendored
Normal file
1005
vendor/github.com/status-im/status-go/protocol/encryption/persistence.go
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
824
vendor/github.com/status-im/status-go/protocol/encryption/protocol.go
generated
vendored
Normal file
824
vendor/github.com/status-im/status-go/protocol/encryption/protocol.go
generated
vendored
Normal file
@@ -0,0 +1,824 @@
|
||||
package encryption
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"crypto/ecdsa"
|
||||
"database/sql"
|
||||
"fmt"
|
||||
|
||||
"go.uber.org/zap"
|
||||
|
||||
"github.com/golang/protobuf/proto"
|
||||
"github.com/pkg/errors"
|
||||
|
||||
"github.com/status-im/status-go/eth-node/crypto"
|
||||
"github.com/status-im/status-go/eth-node/types"
|
||||
|
||||
"github.com/status-im/status-go/protocol/encryption/multidevice"
|
||||
"github.com/status-im/status-go/protocol/encryption/publisher"
|
||||
"github.com/status-im/status-go/protocol/encryption/sharedsecret"
|
||||
)
|
||||
|
||||
//go:generate protoc --go_out=. ./protocol_message.proto
|
||||
|
||||
const (
|
||||
protocolVersion = 1
|
||||
sharedSecretNegotiationVersion = 1
|
||||
partitionedTopicMinVersion = 1
|
||||
defaultMinVersion = 0
|
||||
)
|
||||
|
||||
type PartitionTopicMode int
|
||||
|
||||
const (
|
||||
PartitionTopicNoSupport PartitionTopicMode = iota
|
||||
PartitionTopicV1
|
||||
)
|
||||
|
||||
type ProtocolMessageSpec struct {
|
||||
Message *ProtocolMessage
|
||||
// Installations is the targeted devices
|
||||
Installations []*multidevice.Installation
|
||||
// SharedSecret is a shared secret established among the installations
|
||||
SharedSecret *sharedsecret.Secret
|
||||
// AgreedSecret indicates whether the shared secret has been agreed
|
||||
AgreedSecret bool
|
||||
// Public means that the spec contains a public wrapped message
|
||||
Public bool
|
||||
}
|
||||
|
||||
func (p *ProtocolMessageSpec) MinVersion() uint32 {
|
||||
if len(p.Installations) == 0 {
|
||||
return defaultMinVersion
|
||||
}
|
||||
|
||||
version := p.Installations[0].Version
|
||||
|
||||
for _, installation := range p.Installations[1:] {
|
||||
if installation.Version < version {
|
||||
version = installation.Version
|
||||
}
|
||||
}
|
||||
return version
|
||||
}
|
||||
|
||||
func (p *ProtocolMessageSpec) PartitionedTopicMode() PartitionTopicMode {
|
||||
if p.MinVersion() >= partitionedTopicMinVersion {
|
||||
return PartitionTopicV1
|
||||
}
|
||||
return PartitionTopicNoSupport
|
||||
}
|
||||
|
||||
type Protocol struct {
|
||||
encryptor *encryptor
|
||||
secret *sharedsecret.SharedSecret
|
||||
multidevice *multidevice.Multidevice
|
||||
publisher *publisher.Publisher
|
||||
subscriptions *Subscriptions
|
||||
|
||||
logger *zap.Logger
|
||||
}
|
||||
|
||||
var (
|
||||
// ErrNoPayload means that there was no payload found in the received protocol message.
|
||||
ErrNoPayload = errors.New("no payload")
|
||||
ErrNoRatchetKey = errors.New("no ratchet key for given keyID")
|
||||
)
|
||||
|
||||
// New creates a new ProtocolService instance
|
||||
func New(
|
||||
db *sql.DB,
|
||||
installationID string,
|
||||
logger *zap.Logger,
|
||||
) *Protocol {
|
||||
return NewWithEncryptorConfig(
|
||||
db,
|
||||
installationID,
|
||||
defaultEncryptorConfig(installationID, logger),
|
||||
logger,
|
||||
)
|
||||
}
|
||||
|
||||
// DB and migrations are shared between encryption package
|
||||
// and its sub-packages.
|
||||
func NewWithEncryptorConfig(
|
||||
db *sql.DB,
|
||||
installationID string,
|
||||
encryptorConfig encryptorConfig,
|
||||
logger *zap.Logger,
|
||||
) *Protocol {
|
||||
return &Protocol{
|
||||
encryptor: newEncryptor(db, encryptorConfig),
|
||||
secret: sharedsecret.New(db, logger),
|
||||
multidevice: multidevice.New(db, &multidevice.Config{
|
||||
MaxInstallations: 3,
|
||||
ProtocolVersion: protocolVersion,
|
||||
InstallationID: installationID,
|
||||
}),
|
||||
publisher: publisher.New(logger),
|
||||
logger: logger.With(zap.Namespace("Protocol")),
|
||||
}
|
||||
}
|
||||
|
||||
type Subscriptions struct {
|
||||
SharedSecrets []*sharedsecret.Secret
|
||||
SendContactCode <-chan struct{}
|
||||
Quit chan struct{}
|
||||
}
|
||||
|
||||
func (p *Protocol) Start(myIdentity *ecdsa.PrivateKey) (*Subscriptions, error) {
|
||||
// Propagate currently cached shared secrets.
|
||||
secrets, err := p.secret.All()
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "failed to get all secrets")
|
||||
}
|
||||
p.subscriptions = &Subscriptions{
|
||||
SharedSecrets: secrets,
|
||||
SendContactCode: p.publisher.Start(),
|
||||
Quit: make(chan struct{}),
|
||||
}
|
||||
return p.subscriptions, nil
|
||||
}
|
||||
|
||||
func (p *Protocol) Stop() error {
|
||||
p.publisher.Stop()
|
||||
if p.subscriptions != nil {
|
||||
close(p.subscriptions.Quit)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (p *Protocol) addBundle(myIdentityKey *ecdsa.PrivateKey, msg *ProtocolMessage) error {
|
||||
// Get a bundle
|
||||
installations, err := p.multidevice.GetOurActiveInstallations(&myIdentityKey.PublicKey)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
bundle, err := p.encryptor.CreateBundle(myIdentityKey, installations)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
msg.Bundles = []*Bundle{bundle}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// BuildPublicMessage marshals a public chat message given the user identity private key and a payload
|
||||
func (p *Protocol) BuildPublicMessage(myIdentityKey *ecdsa.PrivateKey, payload []byte) (*ProtocolMessageSpec, error) {
|
||||
// Build message not encrypted
|
||||
message := &ProtocolMessage{
|
||||
InstallationId: p.encryptor.config.InstallationID,
|
||||
PublicMessage: payload,
|
||||
}
|
||||
|
||||
err := p.addBundle(myIdentityKey, message)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &ProtocolMessageSpec{Message: message, Public: true}, nil
|
||||
}
|
||||
|
||||
// BuildEncryptedMessage returns a 1:1 chat message and optionally a negotiated topic given the user identity private key, the recipient's public key, and a payload
|
||||
func (p *Protocol) BuildEncryptedMessage(myIdentityKey *ecdsa.PrivateKey, publicKey *ecdsa.PublicKey, payload []byte) (*ProtocolMessageSpec, error) {
|
||||
|
||||
// Get recipients installations.
|
||||
activeInstallations, err := p.multidevice.GetActiveInstallations(publicKey)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Encrypt payload
|
||||
encryptedMessagesByInstalls, installations, err := p.encryptor.EncryptPayload(publicKey, myIdentityKey, activeInstallations, payload)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Build message
|
||||
message := &ProtocolMessage{
|
||||
InstallationId: p.encryptor.config.InstallationID,
|
||||
EncryptedMessage: encryptedMessagesByInstalls,
|
||||
}
|
||||
|
||||
err = p.addBundle(myIdentityKey, message)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Check who we are sending the message to, and see if we have a shared secret
|
||||
// across devices
|
||||
var installationIDs []string
|
||||
for installationID := range message.GetEncryptedMessage() {
|
||||
if installationID != noInstallationID {
|
||||
installationIDs = append(installationIDs, installationID)
|
||||
}
|
||||
}
|
||||
|
||||
sharedSecret, agreed, err := p.secret.Agreed(myIdentityKey, p.encryptor.config.InstallationID, publicKey, installationIDs)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
spec := &ProtocolMessageSpec{
|
||||
SharedSecret: sharedSecret,
|
||||
AgreedSecret: agreed,
|
||||
Message: message,
|
||||
Installations: installations,
|
||||
}
|
||||
return spec, nil
|
||||
}
|
||||
|
||||
func (p *Protocol) GenerateHashRatchetKey(groupID []byte) (*HashRatchetKeyCompatibility, error) {
|
||||
return p.encryptor.GenerateHashRatchetKey(groupID)
|
||||
}
|
||||
|
||||
func (p *Protocol) GetAllHREncodedKeys(groupID []byte) ([]byte, error) {
|
||||
keys, err := p.encryptor.persistence.GetKeysForGroup(groupID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if len(keys) == 0 {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
return p.GetMarshaledHREncodedKeys(groupID, keys)
|
||||
}
|
||||
|
||||
// GetKeyIDsForGroup returns a slice of key IDs belonging to a given group ID
|
||||
func (p *Protocol) GetKeysForGroup(groupID []byte) ([]*HashRatchetKeyCompatibility, error) {
|
||||
return p.encryptor.persistence.GetKeysForGroup(groupID)
|
||||
}
|
||||
|
||||
func (p *Protocol) GetHREncodedKeys(groupID []byte, ratchets []*HashRatchetKeyCompatibility) *HRKeys {
|
||||
keys := &HRKeys{}
|
||||
for _, ratchet := range ratchets {
|
||||
key := &HRKey{
|
||||
DeprecatedKeyId: ratchet.DeprecatedKeyID(),
|
||||
Key: ratchet.Key,
|
||||
Timestamp: ratchet.Timestamp,
|
||||
}
|
||||
keys.Keys = append(keys.Keys, key)
|
||||
}
|
||||
|
||||
return keys
|
||||
}
|
||||
|
||||
func (p *Protocol) GetMarshaledHREncodedKeys(groupID []byte, ratchets []*HashRatchetKeyCompatibility) ([]byte, error) {
|
||||
keys := p.GetHREncodedKeys(groupID, ratchets)
|
||||
return proto.Marshal(keys)
|
||||
}
|
||||
|
||||
// BuildHashRatchetRekeyGroup builds a public message
|
||||
// with the new key
|
||||
func (p *Protocol) BuildHashRatchetReKeyGroupMessage(myIdentityKey *ecdsa.PrivateKey, recipients []*ecdsa.PublicKey, groupID []byte, payload []byte, ratchet *HashRatchetKeyCompatibility) (*ProtocolMessageSpec, error) {
|
||||
|
||||
var err error
|
||||
if ratchet == nil {
|
||||
ratchet, err = p.GenerateHashRatchetKey(groupID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
message, err := buildGroupRekeyMessage(myIdentityKey, groupID, ratchet.Timestamp, ratchet.Key, recipients)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
keys := &HRKeys{
|
||||
RekeyGroup: message,
|
||||
}
|
||||
spec := &ProtocolMessageSpec{
|
||||
Public: true,
|
||||
Message: &ProtocolMessage{
|
||||
InstallationId: p.encryptor.config.InstallationID,
|
||||
EncryptedMessage: map[string]*EncryptedMessageProtocol{noInstallationID: &EncryptedMessageProtocol{
|
||||
HRHeader: &HRHeader{
|
||||
SeqNo: 0,
|
||||
GroupId: groupID,
|
||||
Keys: keys,
|
||||
},
|
||||
Payload: payload,
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
return spec, nil
|
||||
}
|
||||
|
||||
// BuildHashRatchetKeyExchangeMessage builds a 1:1 message
|
||||
// containing newly generated hash ratchet key
|
||||
func (p *Protocol) BuildHashRatchetKeyExchangeMessage(myIdentityKey *ecdsa.PrivateKey, publicKey *ecdsa.PublicKey, groupID []byte, ratchets []*HashRatchetKeyCompatibility) (*ProtocolMessageSpec, error) {
|
||||
|
||||
keys := p.GetHREncodedKeys(groupID, ratchets)
|
||||
|
||||
encodedKeys, err := proto.Marshal(keys)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
response, err := p.BuildEncryptedMessage(myIdentityKey, publicKey, encodedKeys)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Loop through installations and assign HRHeader
|
||||
// SeqNo=0 has a special meaning for HandleMessage
|
||||
// and signifies a message with hash ratchet key payload
|
||||
for _, v := range response.Message.EncryptedMessage {
|
||||
v.HRHeader = &HRHeader{
|
||||
SeqNo: 0,
|
||||
GroupId: groupID,
|
||||
Keys: keys,
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
return response, err
|
||||
}
|
||||
|
||||
func (p *Protocol) BuildHashRatchetKeyExchangeMessageWithPayload(myIdentityKey *ecdsa.PrivateKey, publicKey *ecdsa.PublicKey, groupID []byte, ratchets []*HashRatchetKeyCompatibility, payload []byte) (*ProtocolMessageSpec, error) {
|
||||
|
||||
keys := p.GetHREncodedKeys(groupID, ratchets)
|
||||
|
||||
response, err := p.BuildEncryptedMessage(myIdentityKey, publicKey, payload)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Loop through installations and assign HRHeader
|
||||
// SeqNo=0 has a special meaning for HandleMessage
|
||||
// and signifies a message with hash ratchet key payload
|
||||
for _, v := range response.Message.EncryptedMessage {
|
||||
v.HRHeader = &HRHeader{
|
||||
SeqNo: 0,
|
||||
GroupId: groupID,
|
||||
Keys: keys,
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
return response, err
|
||||
}
|
||||
|
||||
func (p *Protocol) GetCurrentKeyForGroup(groupID []byte) (*HashRatchetKeyCompatibility, error) {
|
||||
return p.encryptor.persistence.GetCurrentKeyForGroup(groupID)
|
||||
|
||||
}
|
||||
|
||||
// BuildHashRatchetMessage returns a hash ratchet chat message
|
||||
func (p *Protocol) BuildHashRatchetMessage(groupID []byte, payload []byte) (*ProtocolMessageSpec, error) {
|
||||
|
||||
ratchet, err := p.encryptor.persistence.GetCurrentKeyForGroup(groupID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Encrypt payload
|
||||
encryptedMessagesByInstalls, err := p.encryptor.EncryptHashRatchetPayload(ratchet, payload)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Build message
|
||||
message := &ProtocolMessage{
|
||||
InstallationId: p.encryptor.config.InstallationID,
|
||||
EncryptedMessage: encryptedMessagesByInstalls,
|
||||
}
|
||||
|
||||
spec := &ProtocolMessageSpec{
|
||||
Message: message,
|
||||
}
|
||||
return spec, nil
|
||||
}
|
||||
|
||||
func (p *Protocol) GetKeyExMessageSpecs(groupID []byte, identity *ecdsa.PrivateKey, recipients []*ecdsa.PublicKey, forceRekey bool) ([]*ProtocolMessageSpec, error) {
|
||||
var ratchets []*HashRatchetKeyCompatibility
|
||||
var err error
|
||||
if !forceRekey {
|
||||
ratchets, err = p.encryptor.persistence.GetKeysForGroup(groupID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
if len(ratchets) == 0 || forceRekey {
|
||||
ratchet, err := p.GenerateHashRatchetKey(groupID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
ratchets = []*HashRatchetKeyCompatibility{ratchet}
|
||||
}
|
||||
specs := make([]*ProtocolMessageSpec, len(recipients))
|
||||
for i, recipient := range recipients {
|
||||
keyExMsg, err := p.BuildHashRatchetKeyExchangeMessage(identity, recipient, groupID, ratchets)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
specs[i] = keyExMsg
|
||||
|
||||
}
|
||||
|
||||
return specs, nil
|
||||
}
|
||||
|
||||
// BuildDHMessage builds a message with DH encryption so that it can be decrypted by any other device.
|
||||
func (p *Protocol) BuildDHMessage(myIdentityKey *ecdsa.PrivateKey, destination *ecdsa.PublicKey, payload []byte) (*ProtocolMessageSpec, error) {
|
||||
// Encrypt payload
|
||||
encryptionResponse, err := p.encryptor.EncryptPayloadWithDH(destination, payload)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Build message
|
||||
message := &ProtocolMessage{
|
||||
InstallationId: p.encryptor.config.InstallationID,
|
||||
EncryptedMessage: encryptionResponse,
|
||||
}
|
||||
|
||||
err = p.addBundle(myIdentityKey, message)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &ProtocolMessageSpec{Message: message}, nil
|
||||
}
|
||||
|
||||
// ProcessPublicBundle processes a received X3DH bundle.
|
||||
func (p *Protocol) ProcessPublicBundle(myIdentityKey *ecdsa.PrivateKey, bundle *Bundle) ([]*multidevice.Installation, error) {
|
||||
logger := p.logger.With(zap.String("site", "ProcessPublicBundle"))
|
||||
|
||||
if err := p.encryptor.ProcessPublicBundle(myIdentityKey, bundle); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
installations, enabled, err := p.recoverInstallationsFromBundle(myIdentityKey, bundle)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// TODO(adam): why do we add installations using identity obtained from GetIdentity()
|
||||
// instead of the output of crypto.CompressPubkey()? I tried the second option
|
||||
// and the unit tests TestTopic and TestMaxDevices fail.
|
||||
identityFromBundle := bundle.GetIdentity()
|
||||
theirIdentity, err := ExtractIdentity(bundle)
|
||||
if err != nil {
|
||||
logger.Panic("unrecoverable error extracting identity", zap.Error(err))
|
||||
}
|
||||
compressedIdentity := crypto.CompressPubkey(theirIdentity)
|
||||
if !bytes.Equal(identityFromBundle, compressedIdentity) {
|
||||
logger.Panic("identity from bundle and compressed are not equal")
|
||||
}
|
||||
|
||||
return p.multidevice.AddInstallations(bundle.GetIdentity(), bundle.GetTimestamp(), installations, enabled)
|
||||
}
|
||||
|
||||
func (p *Protocol) GetMultiDevice() *multidevice.Multidevice {
|
||||
return p.multidevice
|
||||
}
|
||||
|
||||
// recoverInstallationsFromBundle extracts installations from the bundle.
|
||||
// It returns extracted installations and true if the installations
|
||||
// are ours, i.e. the bundle was created by our identity key.
|
||||
func (p *Protocol) recoverInstallationsFromBundle(myIdentityKey *ecdsa.PrivateKey, bundle *Bundle) ([]*multidevice.Installation, bool, error) {
|
||||
var installations []*multidevice.Installation
|
||||
|
||||
theirIdentity, err := ExtractIdentity(bundle)
|
||||
if err != nil {
|
||||
return nil, false, err
|
||||
}
|
||||
|
||||
myIdentityStr := fmt.Sprintf("0x%x", crypto.FromECDSAPub(&myIdentityKey.PublicKey))
|
||||
theirIdentityStr := fmt.Sprintf("0x%x", crypto.FromECDSAPub(theirIdentity))
|
||||
// Any device from other peers will be considered enabled, ours needs to
|
||||
// be explicitly enabled.
|
||||
enabled := theirIdentityStr != myIdentityStr
|
||||
signedPreKeys := bundle.GetSignedPreKeys()
|
||||
|
||||
for installationID, signedPreKey := range signedPreKeys {
|
||||
if installationID != p.multidevice.InstallationID() {
|
||||
installations = append(installations, &multidevice.Installation{
|
||||
Identity: theirIdentityStr,
|
||||
ID: installationID,
|
||||
Version: signedPreKey.GetProtocolVersion(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
return installations, enabled, nil
|
||||
}
|
||||
|
||||
// GetBundle retrieves or creates a X3DH bundle, given a private identity key.
|
||||
func (p *Protocol) GetBundle(myIdentityKey *ecdsa.PrivateKey) (*Bundle, error) {
|
||||
installations, err := p.multidevice.GetOurActiveInstallations(&myIdentityKey.PublicKey)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return p.encryptor.CreateBundle(myIdentityKey, installations)
|
||||
}
|
||||
|
||||
// EnableInstallation enables an installation for multi-device sync.
|
||||
func (p *Protocol) EnableInstallation(myIdentityKey *ecdsa.PublicKey, installationID string) error {
|
||||
return p.multidevice.EnableInstallation(myIdentityKey, installationID)
|
||||
}
|
||||
|
||||
// DisableInstallation disables an installation for multi-device sync.
|
||||
func (p *Protocol) DisableInstallation(myIdentityKey *ecdsa.PublicKey, installationID string) error {
|
||||
return p.multidevice.DisableInstallation(myIdentityKey, installationID)
|
||||
}
|
||||
|
||||
// GetOurInstallations returns all the installations available given an identity
|
||||
func (p *Protocol) GetOurInstallations(myIdentityKey *ecdsa.PublicKey) ([]*multidevice.Installation, error) {
|
||||
return p.multidevice.GetOurInstallations(myIdentityKey)
|
||||
}
|
||||
|
||||
// GetOurActiveInstallations returns all the active installations available given an identity
|
||||
func (p *Protocol) GetOurActiveInstallations(myIdentityKey *ecdsa.PublicKey) ([]*multidevice.Installation, error) {
|
||||
return p.multidevice.GetOurActiveInstallations(myIdentityKey)
|
||||
}
|
||||
|
||||
// SetInstallationMetadata sets the metadata for our own installation
|
||||
func (p *Protocol) SetInstallationMetadata(myIdentityKey *ecdsa.PublicKey, installationID string, data *multidevice.InstallationMetadata) error {
|
||||
return p.multidevice.SetInstallationMetadata(myIdentityKey, installationID, data)
|
||||
}
|
||||
|
||||
// SetInstallationName sets the metadata for our own installation
|
||||
func (p *Protocol) SetInstallationName(myIdentityKey *ecdsa.PublicKey, installationID string, name string) error {
|
||||
return p.multidevice.SetInstallationName(myIdentityKey, installationID, name)
|
||||
}
|
||||
|
||||
// GetPublicBundle retrieves a public bundle given an identity
|
||||
func (p *Protocol) GetPublicBundle(theirIdentityKey *ecdsa.PublicKey) (*Bundle, error) {
|
||||
installations, err := p.multidevice.GetActiveInstallations(theirIdentityKey)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return p.encryptor.GetPublicBundle(theirIdentityKey, installations)
|
||||
}
|
||||
|
||||
// ConfirmMessageProcessed confirms and deletes message keys for the given messages
|
||||
func (p *Protocol) ConfirmMessageProcessed(messageID []byte) error {
|
||||
logger := p.logger.With(zap.String("site", "ConfirmMessageProcessed"))
|
||||
logger.Debug("confirming message", zap.String("messageID", types.EncodeHex(messageID)))
|
||||
return p.encryptor.ConfirmMessageProcessed(messageID)
|
||||
}
|
||||
|
||||
type HashRatchetInfo struct {
|
||||
GroupID []byte
|
||||
KeyID []byte
|
||||
}
|
||||
type DecryptMessageResponse struct {
|
||||
DecryptedMessage []byte
|
||||
Installations []*multidevice.Installation
|
||||
SharedSecrets []*sharedsecret.Secret
|
||||
HashRatchetInfo []*HashRatchetInfo
|
||||
}
|
||||
|
||||
func (p *Protocol) HandleHashRatchetKeysPayload(groupID, encodedKeys []byte, myIdentityKey *ecdsa.PrivateKey, theirIdentityKey *ecdsa.PublicKey) ([]*HashRatchetInfo, error) {
|
||||
|
||||
keys := &HRKeys{}
|
||||
err := proto.Unmarshal(encodedKeys, keys)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return p.HandleHashRatchetKeys(groupID, keys, myIdentityKey, theirIdentityKey)
|
||||
}
|
||||
|
||||
func (p *Protocol) HandleHashRatchetKeys(groupID []byte, keys *HRKeys, myIdentityKey *ecdsa.PrivateKey, theirIdentityKey *ecdsa.PublicKey) ([]*HashRatchetInfo, error) {
|
||||
if keys == nil {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
var info []*HashRatchetInfo
|
||||
|
||||
for _, key := range keys.Keys {
|
||||
ratchet := &HashRatchetKeyCompatibility{
|
||||
GroupID: groupID,
|
||||
Timestamp: key.Timestamp,
|
||||
Key: key.Key,
|
||||
}
|
||||
|
||||
// If there's no timestamp, is coming from an older client
|
||||
if key.Timestamp == 0 {
|
||||
ratchet.Timestamp = uint64(key.DeprecatedKeyId)
|
||||
}
|
||||
keyID, err := ratchet.GetKeyID()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
p.logger.Debug("retrieved keys", zap.String("keyID", types.Bytes2Hex(keyID)))
|
||||
|
||||
// Payload contains hash ratchet key
|
||||
err = p.encryptor.persistence.SaveHashRatchetKey(ratchet)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
info = append(info, &HashRatchetInfo{GroupID: groupID, KeyID: keyID})
|
||||
}
|
||||
|
||||
if keys.RekeyGroup != nil {
|
||||
if keys.RekeyGroup.Timestamp == 0 {
|
||||
return nil, errors.New("timestamp can't be nil")
|
||||
}
|
||||
|
||||
encryptionKey, err := decryptGroupRekeyMessage(myIdentityKey, theirIdentityKey, keys.RekeyGroup)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if len(encryptionKey) != 0 {
|
||||
|
||||
ratchet := &HashRatchetKeyCompatibility{
|
||||
GroupID: groupID,
|
||||
Timestamp: keys.RekeyGroup.Timestamp,
|
||||
Key: encryptionKey,
|
||||
}
|
||||
|
||||
keyID, err := ratchet.GetKeyID()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
p.logger.Debug("retrieved group keys", zap.String("keyID", types.Bytes2Hex(keyID)))
|
||||
// Payload contains hash ratchet key
|
||||
err = p.encryptor.persistence.SaveHashRatchetKey(ratchet)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
info = append(info, &HashRatchetInfo{GroupID: groupID, KeyID: keyID})
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
return info, nil
|
||||
}
|
||||
|
||||
// HandleMessage unmarshals a message and processes it, decrypting it if it is a 1:1 message.
|
||||
func (p *Protocol) HandleMessage(
|
||||
myIdentityKey *ecdsa.PrivateKey,
|
||||
theirPublicKey *ecdsa.PublicKey,
|
||||
protocolMessage *ProtocolMessage,
|
||||
messageID []byte,
|
||||
) (*DecryptMessageResponse, error) {
|
||||
logger := p.logger.With(zap.String("site", "HandleMessage"))
|
||||
response := &DecryptMessageResponse{}
|
||||
|
||||
logger.Debug("received a protocol message",
|
||||
zap.String("sender-public-key",
|
||||
types.EncodeHex(crypto.FromECDSAPub(theirPublicKey))),
|
||||
zap.String("my-installation-id", p.encryptor.config.InstallationID),
|
||||
zap.String("messageID", types.EncodeHex(messageID)))
|
||||
|
||||
if p.encryptor == nil {
|
||||
return nil, errors.New("encryption service not initialized")
|
||||
}
|
||||
|
||||
// Process bundles
|
||||
for _, bundle := range protocolMessage.GetBundles() {
|
||||
// Should we stop processing if the bundle cannot be verified?
|
||||
newInstallations, err := p.ProcessPublicBundle(myIdentityKey, bundle)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
response.Installations = newInstallations
|
||||
}
|
||||
|
||||
// Check if it's a public message
|
||||
if publicMessage := protocolMessage.GetPublicMessage(); publicMessage != nil {
|
||||
// Nothing to do, as already in cleartext
|
||||
response.DecryptedMessage = publicMessage
|
||||
return response, nil
|
||||
}
|
||||
|
||||
// Decrypt message
|
||||
if encryptedMessage := protocolMessage.GetEncryptedMessage(); encryptedMessage != nil {
|
||||
message, err := p.encryptor.DecryptPayload(
|
||||
myIdentityKey,
|
||||
theirPublicKey,
|
||||
protocolMessage.GetInstallationId(),
|
||||
encryptedMessage,
|
||||
messageID,
|
||||
)
|
||||
|
||||
if err == ErrHashRatchetGroupIDNotFound {
|
||||
msg := p.encryptor.GetMessage(encryptedMessage)
|
||||
|
||||
if msg != nil {
|
||||
if header := msg.GetHRHeader(); header != nil {
|
||||
response.HashRatchetInfo = append(response.HashRatchetInfo, &HashRatchetInfo{GroupID: header.GroupId, KeyID: header.KeyId})
|
||||
}
|
||||
}
|
||||
return response, err
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
dmProtocol := encryptedMessage[p.encryptor.config.InstallationID]
|
||||
if dmProtocol == nil {
|
||||
dmProtocol = encryptedMessage[noInstallationID]
|
||||
}
|
||||
if dmProtocol != nil {
|
||||
hrHeader := dmProtocol.HRHeader
|
||||
if hrHeader != nil && hrHeader.SeqNo == 0 {
|
||||
var hashRatchetKeys []*HashRatchetInfo
|
||||
if hrHeader.Keys != nil {
|
||||
hashRatchetKeys, err = p.HandleHashRatchetKeys(hrHeader.GroupId, hrHeader.Keys, myIdentityKey, theirPublicKey)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
} else {
|
||||
// For backward compatibility
|
||||
hashRatchetKeys, err = p.HandleHashRatchetKeysPayload(hrHeader.GroupId, message, myIdentityKey, theirPublicKey)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
response.HashRatchetInfo = hashRatchetKeys
|
||||
}
|
||||
}
|
||||
|
||||
bundles := protocolMessage.GetBundles()
|
||||
version := getProtocolVersion(bundles, protocolMessage.GetInstallationId())
|
||||
if version >= sharedSecretNegotiationVersion {
|
||||
sharedSecret, err := p.secret.Generate(myIdentityKey, theirPublicKey, protocolMessage.GetInstallationId())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
response.SharedSecrets = []*sharedsecret.Secret{sharedSecret}
|
||||
}
|
||||
response.DecryptedMessage = message
|
||||
return response, nil
|
||||
}
|
||||
|
||||
// Return error
|
||||
return nil, ErrNoPayload
|
||||
}
|
||||
|
||||
func (p *Protocol) ShouldAdvertiseBundle(publicKey *ecdsa.PublicKey, time int64) (bool, error) {
|
||||
return p.publisher.ShouldAdvertiseBundle(publicKey, time)
|
||||
}
|
||||
|
||||
func (p *Protocol) ConfirmBundleAdvertisement(publicKey *ecdsa.PublicKey, time int64) {
|
||||
p.publisher.SetLastAck(publicKey, time)
|
||||
}
|
||||
|
||||
func (p *Protocol) BuildBundleAdvertiseMessage(myIdentityKey *ecdsa.PrivateKey, publicKey *ecdsa.PublicKey) (*ProtocolMessageSpec, error) {
|
||||
return p.BuildDHMessage(myIdentityKey, publicKey, nil)
|
||||
}
|
||||
|
||||
func getProtocolVersion(bundles []*Bundle, installationID string) uint32 {
|
||||
if installationID == "" {
|
||||
return defaultMinVersion
|
||||
}
|
||||
|
||||
for _, bundle := range bundles {
|
||||
if bundle != nil {
|
||||
signedPreKeys := bundle.GetSignedPreKeys()
|
||||
if signedPreKeys == nil {
|
||||
continue
|
||||
}
|
||||
|
||||
signedPreKey := signedPreKeys[installationID]
|
||||
if signedPreKey == nil {
|
||||
return defaultMinVersion
|
||||
}
|
||||
|
||||
return signedPreKey.GetProtocolVersion()
|
||||
}
|
||||
}
|
||||
|
||||
return defaultMinVersion
|
||||
}
|
||||
|
||||
func (p *Protocol) EncryptWithHashRatchet(groupID []byte, payload []byte) ([]byte, *HashRatchetKeyCompatibility, uint32, error) {
|
||||
ratchet, err := p.encryptor.persistence.GetCurrentKeyForGroup(groupID)
|
||||
if err != nil {
|
||||
return nil, nil, 0, err
|
||||
}
|
||||
|
||||
encryptedPayload, newSeqNo, err := p.encryptor.EncryptWithHR(ratchet, payload)
|
||||
if err != nil {
|
||||
return nil, nil, 0, err
|
||||
}
|
||||
|
||||
return encryptedPayload, ratchet, newSeqNo, nil
|
||||
}
|
||||
|
||||
func (p *Protocol) DecryptWithHashRatchet(keyID []byte, seqNo uint32, payload []byte) ([]byte, error) {
|
||||
ratchet, err := p.encryptor.persistence.GetHashRatchetKeyByID(keyID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if ratchet == nil {
|
||||
return nil, ErrNoRatchetKey
|
||||
}
|
||||
|
||||
return p.encryptor.DecryptWithHR(ratchet, seqNo, payload)
|
||||
}
|
||||
791
vendor/github.com/status-im/status-go/protocol/encryption/protocol_message.pb.go
generated
vendored
Normal file
791
vendor/github.com/status-im/status-go/protocol/encryption/protocol_message.pb.go
generated
vendored
Normal file
@@ -0,0 +1,791 @@
|
||||
// Code generated by protoc-gen-go. DO NOT EDIT.
|
||||
// source: protocol_message.proto
|
||||
|
||||
package encryption
|
||||
|
||||
import (
|
||||
fmt "fmt"
|
||||
proto "github.com/golang/protobuf/proto"
|
||||
math "math"
|
||||
)
|
||||
|
||||
// Reference imports to suppress errors if they are not otherwise used.
|
||||
var _ = proto.Marshal
|
||||
var _ = fmt.Errorf
|
||||
var _ = math.Inf
|
||||
|
||||
// This is a compile-time assertion to ensure that this generated file
|
||||
// is compatible with the proto package it is being compiled against.
|
||||
// A compilation error at this line likely means your copy of the
|
||||
// proto package needs to be updated.
|
||||
const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package
|
||||
|
||||
type SignedPreKey struct {
|
||||
SignedPreKey []byte `protobuf:"bytes,1,opt,name=signed_pre_key,json=signedPreKey,proto3" json:"signed_pre_key,omitempty"`
|
||||
Version uint32 `protobuf:"varint,2,opt,name=version,proto3" json:"version,omitempty"`
|
||||
ProtocolVersion uint32 `protobuf:"varint,3,opt,name=protocol_version,json=protocolVersion,proto3" json:"protocol_version,omitempty"`
|
||||
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
XXX_sizecache int32 `json:"-"`
|
||||
}
|
||||
|
||||
func (m *SignedPreKey) Reset() { *m = SignedPreKey{} }
|
||||
func (m *SignedPreKey) String() string { return proto.CompactTextString(m) }
|
||||
func (*SignedPreKey) ProtoMessage() {}
|
||||
func (*SignedPreKey) Descriptor() ([]byte, []int) {
|
||||
return fileDescriptor_4e37b52004a72e16, []int{0}
|
||||
}
|
||||
|
||||
func (m *SignedPreKey) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_SignedPreKey.Unmarshal(m, b)
|
||||
}
|
||||
func (m *SignedPreKey) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
|
||||
return xxx_messageInfo_SignedPreKey.Marshal(b, m, deterministic)
|
||||
}
|
||||
func (m *SignedPreKey) XXX_Merge(src proto.Message) {
|
||||
xxx_messageInfo_SignedPreKey.Merge(m, src)
|
||||
}
|
||||
func (m *SignedPreKey) XXX_Size() int {
|
||||
return xxx_messageInfo_SignedPreKey.Size(m)
|
||||
}
|
||||
func (m *SignedPreKey) XXX_DiscardUnknown() {
|
||||
xxx_messageInfo_SignedPreKey.DiscardUnknown(m)
|
||||
}
|
||||
|
||||
var xxx_messageInfo_SignedPreKey proto.InternalMessageInfo
|
||||
|
||||
func (m *SignedPreKey) GetSignedPreKey() []byte {
|
||||
if m != nil {
|
||||
return m.SignedPreKey
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *SignedPreKey) GetVersion() uint32 {
|
||||
if m != nil {
|
||||
return m.Version
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (m *SignedPreKey) GetProtocolVersion() uint32 {
|
||||
if m != nil {
|
||||
return m.ProtocolVersion
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
// X3DH prekey bundle
|
||||
type Bundle struct {
|
||||
// Identity key
|
||||
Identity []byte `protobuf:"bytes,1,opt,name=identity,proto3" json:"identity,omitempty"`
|
||||
// Installation id
|
||||
SignedPreKeys map[string]*SignedPreKey `protobuf:"bytes,2,rep,name=signed_pre_keys,json=signedPreKeys,proto3" json:"signed_pre_keys,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
|
||||
// Prekey signature
|
||||
Signature []byte `protobuf:"bytes,4,opt,name=signature,proto3" json:"signature,omitempty"`
|
||||
// When the bundle was created locally
|
||||
Timestamp int64 `protobuf:"varint,5,opt,name=timestamp,proto3" json:"timestamp,omitempty"`
|
||||
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
XXX_sizecache int32 `json:"-"`
|
||||
}
|
||||
|
||||
func (m *Bundle) Reset() { *m = Bundle{} }
|
||||
func (m *Bundle) String() string { return proto.CompactTextString(m) }
|
||||
func (*Bundle) ProtoMessage() {}
|
||||
func (*Bundle) Descriptor() ([]byte, []int) {
|
||||
return fileDescriptor_4e37b52004a72e16, []int{1}
|
||||
}
|
||||
|
||||
func (m *Bundle) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_Bundle.Unmarshal(m, b)
|
||||
}
|
||||
func (m *Bundle) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
|
||||
return xxx_messageInfo_Bundle.Marshal(b, m, deterministic)
|
||||
}
|
||||
func (m *Bundle) XXX_Merge(src proto.Message) {
|
||||
xxx_messageInfo_Bundle.Merge(m, src)
|
||||
}
|
||||
func (m *Bundle) XXX_Size() int {
|
||||
return xxx_messageInfo_Bundle.Size(m)
|
||||
}
|
||||
func (m *Bundle) XXX_DiscardUnknown() {
|
||||
xxx_messageInfo_Bundle.DiscardUnknown(m)
|
||||
}
|
||||
|
||||
var xxx_messageInfo_Bundle proto.InternalMessageInfo
|
||||
|
||||
func (m *Bundle) GetIdentity() []byte {
|
||||
if m != nil {
|
||||
return m.Identity
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *Bundle) GetSignedPreKeys() map[string]*SignedPreKey {
|
||||
if m != nil {
|
||||
return m.SignedPreKeys
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *Bundle) GetSignature() []byte {
|
||||
if m != nil {
|
||||
return m.Signature
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *Bundle) GetTimestamp() int64 {
|
||||
if m != nil {
|
||||
return m.Timestamp
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
type BundleContainer struct {
|
||||
// X3DH prekey bundle
|
||||
Bundle *Bundle `protobuf:"bytes,1,opt,name=bundle,proto3" json:"bundle,omitempty"`
|
||||
// Private signed prekey
|
||||
PrivateSignedPreKey []byte `protobuf:"bytes,2,opt,name=private_signed_pre_key,json=privateSignedPreKey,proto3" json:"private_signed_pre_key,omitempty"`
|
||||
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
XXX_sizecache int32 `json:"-"`
|
||||
}
|
||||
|
||||
func (m *BundleContainer) Reset() { *m = BundleContainer{} }
|
||||
func (m *BundleContainer) String() string { return proto.CompactTextString(m) }
|
||||
func (*BundleContainer) ProtoMessage() {}
|
||||
func (*BundleContainer) Descriptor() ([]byte, []int) {
|
||||
return fileDescriptor_4e37b52004a72e16, []int{2}
|
||||
}
|
||||
|
||||
func (m *BundleContainer) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_BundleContainer.Unmarshal(m, b)
|
||||
}
|
||||
func (m *BundleContainer) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
|
||||
return xxx_messageInfo_BundleContainer.Marshal(b, m, deterministic)
|
||||
}
|
||||
func (m *BundleContainer) XXX_Merge(src proto.Message) {
|
||||
xxx_messageInfo_BundleContainer.Merge(m, src)
|
||||
}
|
||||
func (m *BundleContainer) XXX_Size() int {
|
||||
return xxx_messageInfo_BundleContainer.Size(m)
|
||||
}
|
||||
func (m *BundleContainer) XXX_DiscardUnknown() {
|
||||
xxx_messageInfo_BundleContainer.DiscardUnknown(m)
|
||||
}
|
||||
|
||||
var xxx_messageInfo_BundleContainer proto.InternalMessageInfo
|
||||
|
||||
func (m *BundleContainer) GetBundle() *Bundle {
|
||||
if m != nil {
|
||||
return m.Bundle
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *BundleContainer) GetPrivateSignedPreKey() []byte {
|
||||
if m != nil {
|
||||
return m.PrivateSignedPreKey
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
type DRHeader struct {
|
||||
// Current ratchet public key
|
||||
Key []byte `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"`
|
||||
// Number of the message in the sending chain
|
||||
N uint32 `protobuf:"varint,2,opt,name=n,proto3" json:"n,omitempty"`
|
||||
// Length of the previous sending chain
|
||||
Pn uint32 `protobuf:"varint,3,opt,name=pn,proto3" json:"pn,omitempty"`
|
||||
// Bundle ID
|
||||
Id []byte `protobuf:"bytes,4,opt,name=id,proto3" json:"id,omitempty"`
|
||||
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
XXX_sizecache int32 `json:"-"`
|
||||
}
|
||||
|
||||
func (m *DRHeader) Reset() { *m = DRHeader{} }
|
||||
func (m *DRHeader) String() string { return proto.CompactTextString(m) }
|
||||
func (*DRHeader) ProtoMessage() {}
|
||||
func (*DRHeader) Descriptor() ([]byte, []int) {
|
||||
return fileDescriptor_4e37b52004a72e16, []int{3}
|
||||
}
|
||||
|
||||
func (m *DRHeader) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_DRHeader.Unmarshal(m, b)
|
||||
}
|
||||
func (m *DRHeader) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
|
||||
return xxx_messageInfo_DRHeader.Marshal(b, m, deterministic)
|
||||
}
|
||||
func (m *DRHeader) XXX_Merge(src proto.Message) {
|
||||
xxx_messageInfo_DRHeader.Merge(m, src)
|
||||
}
|
||||
func (m *DRHeader) XXX_Size() int {
|
||||
return xxx_messageInfo_DRHeader.Size(m)
|
||||
}
|
||||
func (m *DRHeader) XXX_DiscardUnknown() {
|
||||
xxx_messageInfo_DRHeader.DiscardUnknown(m)
|
||||
}
|
||||
|
||||
var xxx_messageInfo_DRHeader proto.InternalMessageInfo
|
||||
|
||||
func (m *DRHeader) GetKey() []byte {
|
||||
if m != nil {
|
||||
return m.Key
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *DRHeader) GetN() uint32 {
|
||||
if m != nil {
|
||||
return m.N
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (m *DRHeader) GetPn() uint32 {
|
||||
if m != nil {
|
||||
return m.Pn
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (m *DRHeader) GetId() []byte {
|
||||
if m != nil {
|
||||
return m.Id
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
type DHHeader struct {
|
||||
// Compressed ephemeral public key
|
||||
Key []byte `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"`
|
||||
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
XXX_sizecache int32 `json:"-"`
|
||||
}
|
||||
|
||||
func (m *DHHeader) Reset() { *m = DHHeader{} }
|
||||
func (m *DHHeader) String() string { return proto.CompactTextString(m) }
|
||||
func (*DHHeader) ProtoMessage() {}
|
||||
func (*DHHeader) Descriptor() ([]byte, []int) {
|
||||
return fileDescriptor_4e37b52004a72e16, []int{4}
|
||||
}
|
||||
|
||||
func (m *DHHeader) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_DHHeader.Unmarshal(m, b)
|
||||
}
|
||||
func (m *DHHeader) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
|
||||
return xxx_messageInfo_DHHeader.Marshal(b, m, deterministic)
|
||||
}
|
||||
func (m *DHHeader) XXX_Merge(src proto.Message) {
|
||||
xxx_messageInfo_DHHeader.Merge(m, src)
|
||||
}
|
||||
func (m *DHHeader) XXX_Size() int {
|
||||
return xxx_messageInfo_DHHeader.Size(m)
|
||||
}
|
||||
func (m *DHHeader) XXX_DiscardUnknown() {
|
||||
xxx_messageInfo_DHHeader.DiscardUnknown(m)
|
||||
}
|
||||
|
||||
var xxx_messageInfo_DHHeader proto.InternalMessageInfo
|
||||
|
||||
func (m *DHHeader) GetKey() []byte {
|
||||
if m != nil {
|
||||
return m.Key
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
type X3DHHeader struct {
|
||||
// Ephemeral key used
|
||||
Key []byte `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"`
|
||||
// Used bundle's signed prekey
|
||||
Id []byte `protobuf:"bytes,4,opt,name=id,proto3" json:"id,omitempty"`
|
||||
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
XXX_sizecache int32 `json:"-"`
|
||||
}
|
||||
|
||||
func (m *X3DHHeader) Reset() { *m = X3DHHeader{} }
|
||||
func (m *X3DHHeader) String() string { return proto.CompactTextString(m) }
|
||||
func (*X3DHHeader) ProtoMessage() {}
|
||||
func (*X3DHHeader) Descriptor() ([]byte, []int) {
|
||||
return fileDescriptor_4e37b52004a72e16, []int{5}
|
||||
}
|
||||
|
||||
func (m *X3DHHeader) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_X3DHHeader.Unmarshal(m, b)
|
||||
}
|
||||
func (m *X3DHHeader) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
|
||||
return xxx_messageInfo_X3DHHeader.Marshal(b, m, deterministic)
|
||||
}
|
||||
func (m *X3DHHeader) XXX_Merge(src proto.Message) {
|
||||
xxx_messageInfo_X3DHHeader.Merge(m, src)
|
||||
}
|
||||
func (m *X3DHHeader) XXX_Size() int {
|
||||
return xxx_messageInfo_X3DHHeader.Size(m)
|
||||
}
|
||||
func (m *X3DHHeader) XXX_DiscardUnknown() {
|
||||
xxx_messageInfo_X3DHHeader.DiscardUnknown(m)
|
||||
}
|
||||
|
||||
var xxx_messageInfo_X3DHHeader proto.InternalMessageInfo
|
||||
|
||||
func (m *X3DHHeader) GetKey() []byte {
|
||||
if m != nil {
|
||||
return m.Key
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *X3DHHeader) GetId() []byte {
|
||||
if m != nil {
|
||||
return m.Id
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Hash Ratchet Header
|
||||
type HRHeader struct {
|
||||
// deprecated group key ID
|
||||
DeprecatedKeyId uint32 `protobuf:"varint,1,opt,name=deprecated_key_id,json=deprecatedKeyId,proto3" json:"deprecated_key_id,omitempty"`
|
||||
// group message number for this key_id
|
||||
SeqNo uint32 `protobuf:"varint,2,opt,name=seq_no,json=seqNo,proto3" json:"seq_no,omitempty"`
|
||||
// group ID
|
||||
GroupId []byte `protobuf:"bytes,3,opt,name=group_id,json=groupId,proto3" json:"group_id,omitempty"`
|
||||
// group key ID
|
||||
KeyId []byte `protobuf:"bytes,4,opt,name=key_id,json=keyId,proto3" json:"key_id,omitempty"`
|
||||
Keys *HRKeys `protobuf:"bytes,5,opt,name=keys,proto3" json:"keys,omitempty"`
|
||||
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
XXX_sizecache int32 `json:"-"`
|
||||
}
|
||||
|
||||
func (m *HRHeader) Reset() { *m = HRHeader{} }
|
||||
func (m *HRHeader) String() string { return proto.CompactTextString(m) }
|
||||
func (*HRHeader) ProtoMessage() {}
|
||||
func (*HRHeader) Descriptor() ([]byte, []int) {
|
||||
return fileDescriptor_4e37b52004a72e16, []int{6}
|
||||
}
|
||||
|
||||
func (m *HRHeader) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_HRHeader.Unmarshal(m, b)
|
||||
}
|
||||
func (m *HRHeader) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
|
||||
return xxx_messageInfo_HRHeader.Marshal(b, m, deterministic)
|
||||
}
|
||||
func (m *HRHeader) XXX_Merge(src proto.Message) {
|
||||
xxx_messageInfo_HRHeader.Merge(m, src)
|
||||
}
|
||||
func (m *HRHeader) XXX_Size() int {
|
||||
return xxx_messageInfo_HRHeader.Size(m)
|
||||
}
|
||||
func (m *HRHeader) XXX_DiscardUnknown() {
|
||||
xxx_messageInfo_HRHeader.DiscardUnknown(m)
|
||||
}
|
||||
|
||||
var xxx_messageInfo_HRHeader proto.InternalMessageInfo
|
||||
|
||||
func (m *HRHeader) GetDeprecatedKeyId() uint32 {
|
||||
if m != nil {
|
||||
return m.DeprecatedKeyId
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (m *HRHeader) GetSeqNo() uint32 {
|
||||
if m != nil {
|
||||
return m.SeqNo
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (m *HRHeader) GetGroupId() []byte {
|
||||
if m != nil {
|
||||
return m.GroupId
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *HRHeader) GetKeyId() []byte {
|
||||
if m != nil {
|
||||
return m.KeyId
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *HRHeader) GetKeys() *HRKeys {
|
||||
if m != nil {
|
||||
return m.Keys
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// RekeyGroup carries a replacement set of group keys distributed during a
// group rekey, indexed by numeric key ID.
type RekeyGroup struct {
	Timestamp            uint64            `protobuf:"varint,2,opt,name=timestamp,proto3" json:"timestamp,omitempty"`
	Keys                 map[uint32][]byte `protobuf:"bytes,4,rep,name=keys,proto3" json:"keys,omitempty" protobuf_key:"varint,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
	XXX_NoUnkeyedLiteral struct{}          `json:"-"`
	XXX_unrecognized     []byte            `json:"-"`
	XXX_sizecache        int32             `json:"-"`
}

// Reset clears the message to its zero value.
func (m *RekeyGroup) Reset() { *m = RekeyGroup{} }

// String returns the compact text representation of the message.
func (m *RekeyGroup) String() string { return proto.CompactTextString(m) }

// ProtoMessage marks RekeyGroup as a protobuf message.
func (*RekeyGroup) ProtoMessage() {}

// Descriptor returns the raw file descriptor and this message's path within it.
func (*RekeyGroup) Descriptor() ([]byte, []int) {
	return fileDescriptor_4e37b52004a72e16, []int{7}
}

// XXX_Unmarshal decodes b into m (generated proto-internal API).
func (m *RekeyGroup) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_RekeyGroup.Unmarshal(m, b)
}

// XXX_Marshal appends the encoding of m to b (generated proto-internal API).
func (m *RekeyGroup) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_RekeyGroup.Marshal(b, m, deterministic)
}

// XXX_Merge merges src into m (generated proto-internal API).
func (m *RekeyGroup) XXX_Merge(src proto.Message) {
	xxx_messageInfo_RekeyGroup.Merge(m, src)
}

// XXX_Size returns the wire-encoded size of the message (generated proto-internal API).
func (m *RekeyGroup) XXX_Size() int {
	return xxx_messageInfo_RekeyGroup.Size(m)
}

// XXX_DiscardUnknown drops any unrecognized fields (generated proto-internal API).
func (m *RekeyGroup) XXX_DiscardUnknown() {
	xxx_messageInfo_RekeyGroup.DiscardUnknown(m)
}

// xxx_messageInfo_RekeyGroup caches reflection info for RekeyGroup (generated).
var xxx_messageInfo_RekeyGroup proto.InternalMessageInfo

// GetTimestamp returns the rekey timestamp, or 0 if m is nil.
func (m *RekeyGroup) GetTimestamp() uint64 {
	if m != nil {
		return m.Timestamp
	}
	return 0
}

// GetKeys returns the key-ID-to-key map, or nil if m is nil.
func (m *RekeyGroup) GetKeys() map[uint32][]byte {
	if m != nil {
		return m.Keys
	}
	return nil
}
|
||||
|
||||
// HRKeys bundles a list of hash-ratchet keys and, optionally, a group rekey
// payload attached to an HRHeader.
type HRKeys struct {
	Keys                 []*HRKey    `protobuf:"bytes,1,rep,name=keys,proto3" json:"keys,omitempty"`
	RekeyGroup           *RekeyGroup `protobuf:"bytes,2,opt,name=rekey_group,json=rekeyGroup,proto3" json:"rekey_group,omitempty"`
	XXX_NoUnkeyedLiteral struct{}    `json:"-"`
	XXX_unrecognized     []byte      `json:"-"`
	XXX_sizecache        int32       `json:"-"`
}

// Reset clears the message to its zero value.
func (m *HRKeys) Reset() { *m = HRKeys{} }

// String returns the compact text representation of the message.
func (m *HRKeys) String() string { return proto.CompactTextString(m) }

// ProtoMessage marks HRKeys as a protobuf message.
func (*HRKeys) ProtoMessage() {}

// Descriptor returns the raw file descriptor and this message's path within it.
func (*HRKeys) Descriptor() ([]byte, []int) {
	return fileDescriptor_4e37b52004a72e16, []int{8}
}

// XXX_Unmarshal decodes b into m (generated proto-internal API).
func (m *HRKeys) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_HRKeys.Unmarshal(m, b)
}

// XXX_Marshal appends the encoding of m to b (generated proto-internal API).
func (m *HRKeys) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_HRKeys.Marshal(b, m, deterministic)
}

// XXX_Merge merges src into m (generated proto-internal API).
func (m *HRKeys) XXX_Merge(src proto.Message) {
	xxx_messageInfo_HRKeys.Merge(m, src)
}

// XXX_Size returns the wire-encoded size of the message (generated proto-internal API).
func (m *HRKeys) XXX_Size() int {
	return xxx_messageInfo_HRKeys.Size(m)
}

// XXX_DiscardUnknown drops any unrecognized fields (generated proto-internal API).
func (m *HRKeys) XXX_DiscardUnknown() {
	xxx_messageInfo_HRKeys.DiscardUnknown(m)
}

// xxx_messageInfo_HRKeys caches reflection info for HRKeys (generated).
var xxx_messageInfo_HRKeys proto.InternalMessageInfo

// GetKeys returns the key list, or nil if m is nil.
func (m *HRKeys) GetKeys() []*HRKey {
	if m != nil {
		return m.Keys
	}
	return nil
}

// GetRekeyGroup returns the rekey payload, or nil if m is nil.
func (m *HRKeys) GetRekeyGroup() *RekeyGroup {
	if m != nil {
		return m.RekeyGroup
	}
	return nil
}
|
||||
|
||||
// HRKey is a single hash-ratchet key together with its legacy numeric ID and
// creation timestamp.
type HRKey struct {
	DeprecatedKeyId      uint32   `protobuf:"varint,1,opt,name=deprecated_key_id,json=deprecatedKeyId,proto3" json:"deprecated_key_id,omitempty"`
	Key                  []byte   `protobuf:"bytes,2,opt,name=key,proto3" json:"key,omitempty"`
	Timestamp            uint64   `protobuf:"varint,3,opt,name=timestamp,proto3" json:"timestamp,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset clears the message to its zero value.
func (m *HRKey) Reset() { *m = HRKey{} }

// String returns the compact text representation of the message.
func (m *HRKey) String() string { return proto.CompactTextString(m) }

// ProtoMessage marks HRKey as a protobuf message.
func (*HRKey) ProtoMessage() {}

// Descriptor returns the raw file descriptor and this message's path within it.
func (*HRKey) Descriptor() ([]byte, []int) {
	return fileDescriptor_4e37b52004a72e16, []int{9}
}

// XXX_Unmarshal decodes b into m (generated proto-internal API).
func (m *HRKey) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_HRKey.Unmarshal(m, b)
}

// XXX_Marshal appends the encoding of m to b (generated proto-internal API).
func (m *HRKey) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_HRKey.Marshal(b, m, deterministic)
}

// XXX_Merge merges src into m (generated proto-internal API).
func (m *HRKey) XXX_Merge(src proto.Message) {
	xxx_messageInfo_HRKey.Merge(m, src)
}

// XXX_Size returns the wire-encoded size of the message (generated proto-internal API).
func (m *HRKey) XXX_Size() int {
	return xxx_messageInfo_HRKey.Size(m)
}

// XXX_DiscardUnknown drops any unrecognized fields (generated proto-internal API).
func (m *HRKey) XXX_DiscardUnknown() {
	xxx_messageInfo_HRKey.DiscardUnknown(m)
}

// xxx_messageInfo_HRKey caches reflection info for HRKey (generated).
var xxx_messageInfo_HRKey proto.InternalMessageInfo

// GetDeprecatedKeyId returns the legacy numeric key ID, or 0 if m is nil.
func (m *HRKey) GetDeprecatedKeyId() uint32 {
	if m != nil {
		return m.DeprecatedKeyId
	}
	return 0
}

// GetKey returns the raw key bytes, or nil if m is nil.
func (m *HRKey) GetKey() []byte {
	if m != nil {
		return m.Key
	}
	return nil
}

// GetTimestamp returns the key's timestamp, or 0 if m is nil.
func (m *HRKey) GetTimestamp() uint64 {
	if m != nil {
		return m.Timestamp
	}
	return 0
}
|
||||
|
||||
// Direct message value
//
// EncryptedMessageProtocol is the per-recipient encrypted envelope: exactly
// which header fields are set depends on the encryption scheme used
// (X3DH + Double Ratchet, plain DH, or hash-ratchet for groups).
type EncryptedMessageProtocol struct {
	X3DHHeader *X3DHHeader `protobuf:"bytes,1,opt,name=X3DH_header,json=X3DHHeader,proto3" json:"X3DH_header,omitempty"`
	DRHeader   *DRHeader   `protobuf:"bytes,2,opt,name=DR_header,json=DRHeader,proto3" json:"DR_header,omitempty"`
	DHHeader   *DHHeader   `protobuf:"bytes,101,opt,name=DH_header,json=DHHeader,proto3" json:"DH_header,omitempty"`
	HRHeader   *HRHeader   `protobuf:"bytes,102,opt,name=HR_header,json=HRHeader,proto3" json:"HR_header,omitempty"`
	// Encrypted payload
	Payload              []byte   `protobuf:"bytes,3,opt,name=payload,proto3" json:"payload,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset clears the message to its zero value.
func (m *EncryptedMessageProtocol) Reset() { *m = EncryptedMessageProtocol{} }

// String returns the compact text representation of the message.
func (m *EncryptedMessageProtocol) String() string { return proto.CompactTextString(m) }

// ProtoMessage marks EncryptedMessageProtocol as a protobuf message.
func (*EncryptedMessageProtocol) ProtoMessage() {}

// Descriptor returns the raw file descriptor and this message's path within it.
func (*EncryptedMessageProtocol) Descriptor() ([]byte, []int) {
	return fileDescriptor_4e37b52004a72e16, []int{10}
}

// XXX_Unmarshal decodes b into m (generated proto-internal API).
func (m *EncryptedMessageProtocol) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_EncryptedMessageProtocol.Unmarshal(m, b)
}

// XXX_Marshal appends the encoding of m to b (generated proto-internal API).
func (m *EncryptedMessageProtocol) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_EncryptedMessageProtocol.Marshal(b, m, deterministic)
}

// XXX_Merge merges src into m (generated proto-internal API).
func (m *EncryptedMessageProtocol) XXX_Merge(src proto.Message) {
	xxx_messageInfo_EncryptedMessageProtocol.Merge(m, src)
}

// XXX_Size returns the wire-encoded size of the message (generated proto-internal API).
func (m *EncryptedMessageProtocol) XXX_Size() int {
	return xxx_messageInfo_EncryptedMessageProtocol.Size(m)
}

// XXX_DiscardUnknown drops any unrecognized fields (generated proto-internal API).
func (m *EncryptedMessageProtocol) XXX_DiscardUnknown() {
	xxx_messageInfo_EncryptedMessageProtocol.DiscardUnknown(m)
}

// xxx_messageInfo_EncryptedMessageProtocol caches reflection info (generated).
var xxx_messageInfo_EncryptedMessageProtocol proto.InternalMessageInfo

// GetX3DHHeader returns the X3DH header, or nil if m is nil.
func (m *EncryptedMessageProtocol) GetX3DHHeader() *X3DHHeader {
	if m != nil {
		return m.X3DHHeader
	}
	return nil
}

// GetDRHeader returns the Double Ratchet header, or nil if m is nil.
func (m *EncryptedMessageProtocol) GetDRHeader() *DRHeader {
	if m != nil {
		return m.DRHeader
	}
	return nil
}

// GetDHHeader returns the plain-DH header, or nil if m is nil.
func (m *EncryptedMessageProtocol) GetDHHeader() *DHHeader {
	if m != nil {
		return m.DHHeader
	}
	return nil
}

// GetHRHeader returns the hash-ratchet header, or nil if m is nil.
func (m *EncryptedMessageProtocol) GetHRHeader() *HRHeader {
	if m != nil {
		return m.HRHeader
	}
	return nil
}

// GetPayload returns the encrypted payload bytes, or nil if m is nil.
func (m *EncryptedMessageProtocol) GetPayload() []byte {
	if m != nil {
		return m.Payload
	}
	return nil
}
|
||||
|
||||
// Top-level protocol message
type ProtocolMessage struct {
	// The device id of the sender
	InstallationId string `protobuf:"bytes,2,opt,name=installation_id,json=installationId,proto3" json:"installation_id,omitempty"`
	// List of bundles
	Bundles []*Bundle `protobuf:"bytes,3,rep,name=bundles,proto3" json:"bundles,omitempty"`
	// One to one message, encrypted, indexed by installation_id
	// TODO map here is redundant in case of community messages
	EncryptedMessage map[string]*EncryptedMessageProtocol `protobuf:"bytes,101,rep,name=encrypted_message,json=encryptedMessage,proto3" json:"encrypted_message,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
	// Public chats, not encrypted
	PublicMessage        []byte   `protobuf:"bytes,102,opt,name=public_message,json=publicMessage,proto3" json:"public_message,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset clears the message to its zero value.
func (m *ProtocolMessage) Reset() { *m = ProtocolMessage{} }

// String returns the compact text representation of the message.
func (m *ProtocolMessage) String() string { return proto.CompactTextString(m) }

// ProtoMessage marks ProtocolMessage as a protobuf message.
func (*ProtocolMessage) ProtoMessage() {}

// Descriptor returns the raw file descriptor and this message's path within it.
func (*ProtocolMessage) Descriptor() ([]byte, []int) {
	return fileDescriptor_4e37b52004a72e16, []int{11}
}

// XXX_Unmarshal decodes b into m (generated proto-internal API).
func (m *ProtocolMessage) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_ProtocolMessage.Unmarshal(m, b)
}

// XXX_Marshal appends the encoding of m to b (generated proto-internal API).
func (m *ProtocolMessage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_ProtocolMessage.Marshal(b, m, deterministic)
}

// XXX_Merge merges src into m (generated proto-internal API).
func (m *ProtocolMessage) XXX_Merge(src proto.Message) {
	xxx_messageInfo_ProtocolMessage.Merge(m, src)
}

// XXX_Size returns the wire-encoded size of the message (generated proto-internal API).
func (m *ProtocolMessage) XXX_Size() int {
	return xxx_messageInfo_ProtocolMessage.Size(m)
}

// XXX_DiscardUnknown drops any unrecognized fields (generated proto-internal API).
func (m *ProtocolMessage) XXX_DiscardUnknown() {
	xxx_messageInfo_ProtocolMessage.DiscardUnknown(m)
}

// xxx_messageInfo_ProtocolMessage caches reflection info (generated).
var xxx_messageInfo_ProtocolMessage proto.InternalMessageInfo

// GetInstallationId returns the sender's installation ID, or "" if m is nil.
func (m *ProtocolMessage) GetInstallationId() string {
	if m != nil {
		return m.InstallationId
	}
	return ""
}

// GetBundles returns the attached prekey bundles, or nil if m is nil.
func (m *ProtocolMessage) GetBundles() []*Bundle {
	if m != nil {
		return m.Bundles
	}
	return nil
}

// GetEncryptedMessage returns the per-installation encrypted payloads, or nil if m is nil.
func (m *ProtocolMessage) GetEncryptedMessage() map[string]*EncryptedMessageProtocol {
	if m != nil {
		return m.EncryptedMessage
	}
	return nil
}

// GetPublicMessage returns the unencrypted public-chat payload, or nil if m is nil.
func (m *ProtocolMessage) GetPublicMessage() []byte {
	if m != nil {
		return m.PublicMessage
	}
	return nil
}
|
||||
|
||||
// init registers every generated message and map type with the proto runtime
// under the "encryption" package namespace.
func init() {
	proto.RegisterType((*SignedPreKey)(nil), "encryption.SignedPreKey")
	proto.RegisterType((*Bundle)(nil), "encryption.Bundle")
	proto.RegisterMapType((map[string]*SignedPreKey)(nil), "encryption.Bundle.SignedPreKeysEntry")
	proto.RegisterType((*BundleContainer)(nil), "encryption.BundleContainer")
	proto.RegisterType((*DRHeader)(nil), "encryption.DRHeader")
	proto.RegisterType((*DHHeader)(nil), "encryption.DHHeader")
	proto.RegisterType((*X3DHHeader)(nil), "encryption.X3DHHeader")
	proto.RegisterType((*HRHeader)(nil), "encryption.HRHeader")
	proto.RegisterType((*RekeyGroup)(nil), "encryption.RekeyGroup")
	proto.RegisterMapType((map[uint32][]byte)(nil), "encryption.RekeyGroup.KeysEntry")
	proto.RegisterType((*HRKeys)(nil), "encryption.HRKeys")
	proto.RegisterType((*HRKey)(nil), "encryption.HRKey")
	proto.RegisterType((*EncryptedMessageProtocol)(nil), "encryption.EncryptedMessageProtocol")
	proto.RegisterType((*ProtocolMessage)(nil), "encryption.ProtocolMessage")
	proto.RegisterMapType((map[string]*EncryptedMessageProtocol)(nil), "encryption.ProtocolMessage.EncryptedMessageEntry")
}

// init registers the raw file descriptor for protocol_message.proto.
func init() {
	proto.RegisterFile("protocol_message.proto", fileDescriptor_4e37b52004a72e16)
}
|
||||
|
||||
// fileDescriptor_4e37b52004a72e16 is the gzipped FileDescriptorProto for
// protocol_message.proto, registered with the proto runtime in init above.
// Generated data — do not edit by hand.
var fileDescriptor_4e37b52004a72e16 = []byte{
	// 770 bytes of a gzipped FileDescriptorProto
	0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x55, 0xdb, 0x4e, 0xdc, 0x48,
	0x10, 0x95, 0xed, 0xb9, 0xd6, 0x5c, 0xe9, 0x05, 0xe4, 0x1d, 0xf1, 0x30, 0xb2, 0x60, 0x77, 0x16,
	0xad, 0xbc, 0xe2, 0x22, 0xb1, 0x22, 0x6f, 0x04, 0x94, 0x01, 0x44, 0x84, 0x3a, 0x52, 0x14, 0xf1,
	0x10, 0xcb, 0x8c, 0x0b, 0xb0, 0x18, 0x6c, 0x63, 0x7b, 0x50, 0xfc, 0x03, 0x79, 0xc9, 0x17, 0xe4,
	0x0b, 0xf2, 0x4f, 0xf9, 0x9a, 0xa8, 0x2f, 0xb6, 0x7b, 0x2e, 0x3c, 0xe4, 0xcd, 0x5d, 0x5d, 0x75,
	0xea, 0x54, 0xd5, 0xe9, 0x32, 0x6c, 0x46, 0x71, 0x98, 0x86, 0x93, 0x70, 0xea, 0x3c, 0x61, 0x92,
	0xb8, 0xf7, 0x68, 0x73, 0x03, 0x01, 0x0c, 0x26, 0x71, 0x16, 0xa5, 0x7e, 0x18, 0x58, 0x19, 0xb4,
	0x3f, 0xf8, 0xf7, 0x01, 0x7a, 0xd7, 0x31, 0x5e, 0x62, 0x46, 0xb6, 0xa1, 0x9b, 0xf0, 0xb3, 0x13,
	0xc5, 0xe8, 0x3c, 0x62, 0x66, 0x6a, 0x43, 0x6d, 0xd4, 0xa6, 0xed, 0x44, 0xf5, 0x32, 0xa1, 0xfe,
	0x82, 0x71, 0xe2, 0x87, 0x81, 0xa9, 0x0f, 0xb5, 0x51, 0x87, 0xe6, 0x47, 0xf2, 0x0f, 0xf4, 0x8b,
	0xac, 0xb9, 0x8b, 0xc1, 0x5d, 0x7a, 0xb9, 0xfd, 0xa3, 0x30, 0x5b, 0xdf, 0x74, 0xa8, 0x9d, 0xcc,
	0x02, 0x6f, 0x8a, 0x64, 0x00, 0x0d, 0xdf, 0xc3, 0x20, 0xf5, 0xd3, 0x3c, 0x5f, 0x71, 0x26, 0x57,
	0xd0, 0x9b, 0x67, 0x94, 0x98, 0xfa, 0xd0, 0x18, 0xb5, 0xf6, 0x77, 0xec, 0xb2, 0x0e, 0x5b, 0x00,
	0xd9, 0x6a, 0x2d, 0xc9, 0x59, 0x90, 0xc6, 0x19, 0xed, 0xa8, 0xcc, 0x13, 0xb2, 0x05, 0x4d, 0x66,
	0x70, 0xd3, 0x59, 0x8c, 0x66, 0x85, 0xe7, 0x2a, 0x0d, 0xec, 0x36, 0xf5, 0x9f, 0x30, 0x49, 0xdd,
	0xa7, 0xc8, 0xac, 0x0e, 0xb5, 0x91, 0x41, 0x4b, 0xc3, 0xe0, 0x06, 0xc8, 0x72, 0x02, 0xd2, 0x07,
	0x23, 0xef, 0x53, 0x93, 0xb2, 0x4f, 0x62, 0x43, 0xf5, 0xc5, 0x9d, 0xce, 0x90, 0x37, 0xa7, 0xb5,
	0x6f, 0xaa, 0x44, 0x55, 0x00, 0x2a, 0xdc, 0x8e, 0xf5, 0xff, 0x35, 0xeb, 0x0b, 0xf4, 0x44, 0x0d,
	0x6f, 0xc3, 0x20, 0x75, 0xfd, 0x00, 0x63, 0xb2, 0x0b, 0xb5, 0x5b, 0x6e, 0xe2, 0xd8, 0xad, 0x7d,
	0xb2, 0x5c, 0x30, 0x95, 0x1e, 0xe4, 0x80, 0x4d, 0xdb, 0x7f, 0x71, 0x53, 0x74, 0x16, 0xe6, 0xa7,
	0xf3, 0x1a, 0xff, 0x90, 0xb7, 0x6a, 0xfa, 0x8b, 0x4a, 0xc3, 0xe8, 0x57, 0xac, 0x0b, 0x68, 0x9c,
	0xd2, 0x31, 0xba, 0x1e, 0xc6, 0x6a, 0x2d, 0x6d, 0x51, 0x4b, 0x1b, 0xb4, 0x7c, 0xc8, 0x5a, 0x40,
	0xba, 0xa0, 0x47, 0xf9, 0x40, 0xf5, 0x88, 0x9f, 0x7d, 0x4f, 0xb6, 0x51, 0xf7, 0x3d, 0x6b, 0x0b,
	0x1a, 0xa7, 0xe3, 0xd7, 0xb0, 0xac, 0x43, 0x80, 0x4f, 0x07, 0xaf, 0xdf, 0x2f, 0xa2, 0x49, 0x7e,
	0x3f, 0x34, 0x68, 0x8c, 0x73, 0x82, 0xbb, 0xb0, 0xe6, 0x61, 0x14, 0xe3, 0xc4, 0x4d, 0xd1, 0x63,
	0xf5, 0x39, 0xbe, 0xc7, 0x21, 0x3a, 0xb4, 0x57, 0x5e, 0x5c, 0x62, 0x76, 0xee, 0x91, 0x0d, 0xa8,
	0x25, 0xf8, 0xec, 0x04, 0xa1, 0xe4, 0x5f, 0x4d, 0xf0, 0xf9, 0x7d, 0x48, 0xfe, 0x84, 0xc6, 0x7d,
	0x1c, 0xce, 0x22, 0x16, 0x69, 0xf0, 0x5c, 0x75, 0x7e, 0x16, 0x11, 0x12, 0x52, 0x90, 0xa8, 0x3e,
	0x72, 0xa0, 0xbf, 0xa0, 0xc2, 0x75, 0x57, 0x5d, 0x1e, 0xc3, 0x98, 0x32, 0x21, 0x50, 0x7e, 0x6f,
	0x7d, 0xd7, 0x00, 0x28, 0x3e, 0x62, 0xf6, 0x8e, 0xe1, 0xcd, 0x8b, 0x89, 0x51, 0xa8, 0x28, 0x62,
	0x22, 0x87, 0x12, 0xb4, 0xc2, 0xc5, 0x3c, 0x54, 0x41, 0x4b, 0x0c, 0xbb, 0xd4, 0x31, 0xf7, 0x1e,
	0x1c, 0x41, 0x73, 0xa5, 0xf2, 0x3a, 0xa2, 0x83, 0xeb, 0xaa, 0xf2, 0xda, 0xaa, 0xbe, 0x1e, 0xa0,
	0x26, 0xb8, 0x92, 0x1d, 0x99, 0x58, 0xe3, 0x89, 0xd7, 0x96, 0xaa, 0x11, 0x99, 0xc8, 0x11, 0xb4,
	0x62, 0xc6, 0xc3, 0xe1, 0xcd, 0x91, 0x52, 0xde, 0x5c, 0x4d, 0x93, 0x42, 0x5c, 0x7c, 0x5b, 0x13,
	0xa8, 0x72, 0x9c, 0xdf, 0x9a, 0x95, 0x2c, 0x45, 0x2f, 0xc5, 0x30, 0xd7, 0x3d, 0x63, 0xa1, 0x7b,
	0xd6, 0x57, 0x1d, 0xcc, 0x33, 0x41, 0x05, 0xbd, 0x2b, 0xb1, 0xde, 0xae, 0xe5, 0x82, 0x61, 0xd4,
	0x99, 0xce, 0x9c, 0x07, 0xae, 0x19, 0xf9, 0x7a, 0xe6, 0xa8, 0x97, 0x32, 0xa4, 0xaa, 0x24, 0xf7,
	0xa0, 0x79, 0x4a, 0xf3, 0x30, 0x51, 0xf1, 0xba, 0x1a, 0x96, 0xbf, 0x13, 0x5a, 0xbe, 0x18, 0x16,
	0x52, 0x64, 0xc2, 0x15, 0x21, 0xe3, 0x22, 0x44, 0xc9, 0x32, 0x2e, 0xb2, 0xdc, 0x2d, 0x87, 0x8c,
	0x8b, 0x2c, 0x85, 0xec, 0x4d, 0xa8, 0x47, 0x6e, 0x36, 0x0d, 0xdd, 0x42, 0xb2, 0xf2, 0x68, 0xfd,
	0xd4, 0xa1, 0x97, 0x17, 0x2e, 0xfb, 0x40, 0xfe, 0x86, 0x9e, 0x1f, 0x24, 0xa9, 0x3b, 0x9d, 0xba,
	0x0c, 0x90, 0xb5, 0x5d, 0xe7, 0xdb, 0xa9, 0xab, 0x9a, 0xcf, 0x3d, 0xf2, 0x2f, 0xd4, 0xc5, 0xfe,
	0x48, 0x4c, 0x83, 0xab, 0x61, 0xd5, 0x8a, 0xc9, 0x5d, 0xc8, 0x67, 0x58, 0xc3, 0xbc, 0xe5, 0xf9,
	0x2f, 0xc5, 0x44, 0x1e, 0xb7, 0xa7, 0xc6, 0x2d, 0xd0, 0xb1, 0x17, 0xe7, 0x24, 0xf4, 0xdc, 0xc7,
	0x05, 0x33, 0xd9, 0x81, 0x6e, 0x34, 0xbb, 0x9d, 0xfa, 0x93, 0x02, 0xfc, 0x8e, 0xd7, 0xda, 0x11,
	0x56, 0xe9, 0x36, 0xf0, 0x61, 0x63, 0x25, 0xe2, 0x8a, 0x45, 0x7c, 0x3c, 0xbf, 0x88, 0xb7, 0x55,
	0x96, 0xaf, 0xa9, 0x47, 0x79, 0x34, 0x27, 0xbd, 0x9b, 0x8e, 0xfd, 0xdf, 0x9b, 0x32, 0xe8, 0xb6,
	0xc6, 0x7f, 0x62, 0x07, 0xbf, 0x02, 0x00, 0x00, 0xff, 0xff, 0x5d, 0x20, 0xbb, 0xc5, 0x5b, 0x07,
	0x00, 0x00,
}
|
||||
111
vendor/github.com/status-im/status-go/protocol/encryption/protocol_message.proto
generated
vendored
Normal file
111
vendor/github.com/status-im/status-go/protocol/encryption/protocol_message.proto
generated
vendored
Normal file
@@ -0,0 +1,111 @@
|
||||
// Wire format for the Status encryption layer: X3DH prekey bundles,
// Double-Ratchet headers, hash-ratchet group keys, and the top-level
// ProtocolMessage envelope.
syntax = "proto3";

option go_package = "./;encryption";
package encryption;

// A signed pre-key as published inside a Bundle.
message SignedPreKey {
  bytes signed_pre_key = 1;
  uint32 version = 2;
  uint32 protocol_version = 3;
}

// X3DH prekey bundle
message Bundle {
  // Identity key
  bytes identity = 1;
  // Signed pre-keys, keyed by installation id
  map<string,SignedPreKey> signed_pre_keys = 2;
  // Prekey signature
  bytes signature = 4;

  // When the bundle was created locally
  int64 timestamp = 5;
}

// BundleContainer pairs a bundle with the private half of its signed prekey
// (kept locally, never sent).
message BundleContainer {
  reserved 3;
  // X3DH prekey bundle
  Bundle bundle = 1;
  // Private signed prekey
  bytes private_signed_pre_key = 2;
}

// Double-Ratchet header.
message DRHeader {
  // Current ratchet public key
  bytes key = 1;
  // Number of the message in the sending chain
  uint32 n = 2;
  // Length of the previous sending chain
  uint32 pn = 3;
  // Bundle ID
  bytes id = 4;
}

// Plain Diffie-Hellman header.
message DHHeader {
  // Compressed ephemeral public key
  bytes key = 1;
}

// X3DH key-agreement header.
message X3DHHeader {
  reserved 3;
  // Ephemeral key used
  bytes key = 1;
  // Used bundle's signed prekey
  bytes id = 4;
}

// Hash Ratchet Header
message HRHeader {
  // deprecated group key ID
  uint32 deprecated_key_id = 1;
  // group message number for this key_id
  uint32 seq_no = 2;
  // group ID
  bytes group_id = 3;
  // group key ID
  bytes key_id = 4;
  // keys distributed alongside this message
  HRKeys keys = 5;
}

// RekeyGroup distributes a replacement set of group keys, keyed by key ID.
message RekeyGroup {
  uint64 timestamp = 2;

  map<uint32, bytes> keys = 4;
}

// HRKeys bundles hash-ratchet keys and an optional group rekey payload.
message HRKeys {
  repeated HRKey keys = 1;
  RekeyGroup rekey_group = 2;
}

// HRKey is a single hash-ratchet key with its legacy numeric ID and timestamp.
message HRKey {
  uint32 deprecated_key_id = 1;
  bytes key = 2;
  uint64 timestamp = 3;
}

// Direct message value
message EncryptedMessageProtocol {
  X3DHHeader X3DH_header = 1;
  DRHeader DR_header = 2;
  DHHeader DH_header = 101;
  HRHeader HR_header = 102;
  // Encrypted payload
  bytes payload = 3;
}

// Top-level protocol message
message ProtocolMessage {
  // The device id of the sender
  string installation_id = 2;

  // List of bundles
  repeated Bundle bundles = 3;

  // One to one message, encrypted, indexed by installation_id
  // TODO map here is redundant in case of community messages
  map<string,EncryptedMessageProtocol> encrypted_message = 101;

  // Public chats, not encrypted
  bytes public_message = 102;
}
|
||||
6
vendor/github.com/status-im/status-go/protocol/encryption/publisher/doc.go
generated
vendored
Normal file
6
vendor/github.com/status-im/status-go/protocol/encryption/publisher/doc.go
generated
vendored
Normal file
@@ -0,0 +1,6 @@
|
||||
// Publisher periodically publishes an info about itself on a known channel.
|
||||
// This channel is a particular topic calculated from the public key.
|
||||
// It is required for other peers to start a secure conversation immediately
|
||||
// using distributed data through the channel.
|
||||
|
||||
package publisher
|
||||
38
vendor/github.com/status-im/status-go/protocol/encryption/publisher/persistence.go
generated
vendored
Normal file
38
vendor/github.com/status-im/status-go/protocol/encryption/publisher/persistence.go
generated
vendored
Normal file
@@ -0,0 +1,38 @@
|
||||
package publisher
|
||||
|
||||
import (
|
||||
"encoding/hex"
|
||||
"sync"
|
||||
)
|
||||
|
||||
// persistence keeps the publisher's in-memory bookkeeping: when we last
// published our contact code and when each peer identity last acknowledged
// us. All state is guarded by mu, so the methods are safe for concurrent
// use (the ticker-loop goroutine and external callers touch it at once).
//
// Fix over the previous version: lastPublished was read and written without
// any synchronization while only lastAcks was mutex-guarded — a data race.
// A single mutex now protects both.
type persistence struct {
	mu            sync.Mutex
	lastPublished int64
	// lastAcks maps a hex-encoded identity (compressed public key) to the
	// unix timestamp of its most recent acknowledgement.
	lastAcks map[string]int64
}

// newPersistence returns an empty, ready-to-use persistence.
func newPersistence() *persistence {
	return &persistence{
		lastAcks: make(map[string]int64),
	}
}

// getLastPublished returns the unix timestamp of the last publication,
// or 0 if nothing has been published yet.
func (s *persistence) getLastPublished() int64 {
	s.mu.Lock()
	defer s.mu.Unlock()
	return s.lastPublished
}

// setLastPublished records the unix timestamp of the latest publication.
func (s *persistence) setLastPublished(lastPublished int64) {
	s.mu.Lock()
	defer s.mu.Unlock()
	s.lastPublished = lastPublished
}

// lastAck returns the unix timestamp of the last ack recorded for identity,
// or 0 if none exists.
func (s *persistence) lastAck(identity []byte) int64 {
	s.mu.Lock()
	defer s.mu.Unlock()
	return s.lastAcks[hex.EncodeToString(identity)]
}

// setLastAck records lastAck as the latest acknowledgement time for identity.
func (s *persistence) setLastAck(identity []byte, lastAck int64) {
	s.mu.Lock()
	defer s.mu.Unlock()
	s.lastAcks[hex.EncodeToString(identity)] = lastAck
}
|
||||
128
vendor/github.com/status-im/status-go/protocol/encryption/publisher/publisher.go
generated
vendored
Normal file
128
vendor/github.com/status-im/status-go/protocol/encryption/publisher/publisher.go
generated
vendored
Normal file
@@ -0,0 +1,128 @@
|
||||
package publisher
|
||||
|
||||
import (
|
||||
"crypto/ecdsa"
|
||||
"errors"
|
||||
"time"
|
||||
|
||||
"go.uber.org/zap"
|
||||
|
||||
"github.com/status-im/status-go/eth-node/crypto"
|
||||
"github.com/status-im/status-go/logutils"
|
||||
)
|
||||
|
||||
const (
	// How often a ticker fires in seconds.
	tickerInterval = 120
	// How often we should publish a contact code in seconds (6 hours).
	publishInterval = 21600
	// Cooldown period on acking messages when not targeting our device.
	// Value is in seconds (2 hours).
	deviceNotFoundAckInterval = 7200
)

var (
	// errNotEnoughTimePassed signals that publishInterval has not yet
	// elapsed since the last publication; callers treat it as a benign
	// skip rather than a failure.
	errNotEnoughTimePassed = errors.New("not enough time passed")
)
|
||||
|
||||
// Publisher periodically signals, via the channel returned by Start, that a
// contact code should be published. It tracks in memory when it last
// published and when peers last acknowledged us.
type Publisher struct {
	persistence *persistence // last-published / last-ack bookkeeping
	logger      *zap.Logger
	notifyCh    chan struct{} // delivers publish signals; created in Start
	quit        chan struct{} // closed by Stop to end the ticker loop; created in Start
}
|
||||
|
||||
func New(logger *zap.Logger) *Publisher {
|
||||
if logger == nil {
|
||||
logger = logutils.ZapLogger()
|
||||
}
|
||||
|
||||
return &Publisher{
|
||||
persistence: newPersistence(),
|
||||
logger: logger.With(zap.Namespace("Publisher")),
|
||||
}
|
||||
}
|
||||
|
||||
func (p *Publisher) Start() <-chan struct{} {
|
||||
logger := p.logger.With(zap.String("site", "Start"))
|
||||
|
||||
logger.Info("starting publisher")
|
||||
|
||||
p.notifyCh = make(chan struct{}, 100)
|
||||
p.quit = make(chan struct{})
|
||||
|
||||
go p.tickerLoop()
|
||||
|
||||
return p.notifyCh
|
||||
}
|
||||
|
||||
// Stop terminates the ticker loop by closing the quit channel.
// Calling Stop before Start is a no-op.
// NOTE(review): the non-blocking receive below only detects an
// already-closed channel; two concurrent Stop calls could both take the
// default branch and double-close p.quit, which panics — presumably Stop
// is only ever invoked from a single goroutine; verify against callers.
func (p *Publisher) Stop() {
	// If hasn't started, ignore
	if p.quit == nil {
		return
	}
	select {
	case _, ok := <-p.quit:
		if !ok {
			// channel already closed
			return
		}
	default:
		close(p.quit)
	}
}
|
||||
|
||||
// tickerLoop runs the periodic publish check: every tickerInterval seconds
// it calls notify, until the quit channel is closed. The ticker is created
// synchronously but the loop runs in a nested goroutine, so tickerLoop
// itself returns immediately.
func (p *Publisher) tickerLoop() {
	ticker := time.NewTicker(tickerInterval * time.Second)

	go func() {
		logger := p.logger.With(zap.String("site", "tickerLoop"))

		for {
			select {
			case <-ticker.C:
				err := p.notify()
				switch err {
				case errNotEnoughTimePassed:
					// Benign: publishInterval has not elapsed yet.
					logger.Debug("not enough time passed")
				case nil:
					// skip
				default:
					logger.Error("error while sending a contact code", zap.Error(err))
				}
			case <-p.quit:
				ticker.Stop()
				return
			}
		}
	}()
}
|
||||
|
||||
func (p *Publisher) notify() error {
|
||||
lastPublished := p.persistence.getLastPublished()
|
||||
|
||||
now := time.Now().Unix()
|
||||
|
||||
if now-lastPublished < publishInterval {
|
||||
return errNotEnoughTimePassed
|
||||
}
|
||||
|
||||
select {
|
||||
case p.notifyCh <- struct{}{}:
|
||||
default:
|
||||
p.logger.Warn("publisher channel full, dropping message")
|
||||
}
|
||||
|
||||
p.persistence.setLastPublished(now)
|
||||
return nil
|
||||
}
|
||||
|
||||
// ShouldAdvertiseBundle reports whether the peer's last acknowledgement is
// within deviceNotFoundAckInterval seconds of now. With no recorded ack
// (lastAcked == 0) this is false for any realistic now.
// The error result is always nil; it is kept for interface stability.
func (p *Publisher) ShouldAdvertiseBundle(publicKey *ecdsa.PublicKey, now int64) (bool, error) {
	identity := crypto.CompressPubkey(publicKey)
	lastAcked := p.persistence.lastAck(identity)
	return now-lastAcked < deviceNotFoundAckInterval, nil
}
|
||||
|
||||
// SetLastAck records now as the time publicKey last acknowledged us.
func (p *Publisher) SetLastAck(publicKey *ecdsa.PublicKey, now int64) {
	identity := crypto.CompressPubkey(publicKey)
	p.persistence.setLastAck(identity, now)
}
|
||||
120
vendor/github.com/status-im/status-go/protocol/encryption/sharedsecret/persistence.go
generated
vendored
Normal file
120
vendor/github.com/status-im/status-go/protocol/encryption/sharedsecret/persistence.go
generated
vendored
Normal file
@@ -0,0 +1,120 @@
|
||||
package sharedsecret
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// Response is the result of a secrets lookup: the stored secret bytes plus
// the set of installation IDs (as map keys) the secret was found for.
type Response struct {
	secret          []byte
	installationIDs map[string]bool
}
|
||||
|
||||
// sqlitePersistence stores shared secrets in the SQLite tables
// `secrets` and `secret_installation_ids`.
type sqlitePersistence struct {
	db *sql.DB
}

// newSQLitePersistence wraps db in a sqlitePersistence.
func newSQLitePersistence(db *sql.DB) *sqlitePersistence {
	return &sqlitePersistence{db: db}
}
|
||||
|
||||
func (s *sqlitePersistence) Add(identity []byte, secret []byte, installationID string) error {
|
||||
tx, err := s.db.Begin()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
insertSecretStmt, err := tx.Prepare("INSERT INTO secrets(identity, secret) VALUES (?, ?)")
|
||||
if err != nil {
|
||||
_ = tx.Rollback()
|
||||
return err
|
||||
}
|
||||
defer insertSecretStmt.Close()
|
||||
|
||||
_, err = insertSecretStmt.Exec(identity, secret)
|
||||
if err != nil {
|
||||
_ = tx.Rollback()
|
||||
return err
|
||||
}
|
||||
|
||||
insertInstallationIDStmt, err := tx.Prepare("INSERT INTO secret_installation_ids(id, identity_id) VALUES (?, ?)")
|
||||
if err != nil {
|
||||
_ = tx.Rollback()
|
||||
return err
|
||||
}
|
||||
defer insertInstallationIDStmt.Close()
|
||||
|
||||
_, err = insertInstallationIDStmt.Exec(installationID, identity)
|
||||
if err != nil {
|
||||
_ = tx.Rollback()
|
||||
return err
|
||||
}
|
||||
return tx.Commit()
|
||||
}
|
||||
|
||||
func (s *sqlitePersistence) Get(identity []byte, installationIDs []string) (*Response, error) {
|
||||
response := &Response{
|
||||
installationIDs: make(map[string]bool),
|
||||
}
|
||||
args := make([]interface{}, len(installationIDs)+1)
|
||||
args[0] = identity
|
||||
for i, installationID := range installationIDs {
|
||||
args[i+1] = installationID
|
||||
}
|
||||
|
||||
/* #nosec */
|
||||
query := `SELECT secret, id
|
||||
FROM secrets t
|
||||
JOIN
|
||||
secret_installation_ids tid
|
||||
ON t.identity = tid.identity_id
|
||||
WHERE
|
||||
t.identity = ?
|
||||
AND
|
||||
tid.id IN (?` + strings.Repeat(",?", len(installationIDs)-1) + `)`
|
||||
|
||||
rows, err := s.db.Query(query, args...)
|
||||
if err != nil && err != sql.ErrNoRows {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
for rows.Next() {
|
||||
var installationID string
|
||||
var secret []byte
|
||||
err = rows.Scan(&secret, &installationID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
response.secret = secret
|
||||
response.installationIDs[installationID] = true
|
||||
}
|
||||
|
||||
return response, nil
|
||||
}
|
||||
|
||||
func (s *sqlitePersistence) All() ([][][]byte, error) {
|
||||
query := "SELECT identity, secret FROM secrets"
|
||||
|
||||
var secrets [][][]byte
|
||||
|
||||
rows, err := s.db.Query(query)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
for rows.Next() {
|
||||
var secret []byte
|
||||
var identity []byte
|
||||
err = rows.Scan(&identity, &secret)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
secrets = append(secrets, [][]byte{identity, secret})
|
||||
}
|
||||
|
||||
return secrets, nil
|
||||
}
|
||||
111
vendor/github.com/status-im/status-go/protocol/encryption/sharedsecret/sharedsecret.go
generated
vendored
Normal file
111
vendor/github.com/status-im/status-go/protocol/encryption/sharedsecret/sharedsecret.go
generated
vendored
Normal file
@@ -0,0 +1,111 @@
|
||||
package sharedsecret
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"crypto/ecdsa"
|
||||
"database/sql"
|
||||
"errors"
|
||||
|
||||
"go.uber.org/zap"
|
||||
|
||||
"github.com/status-im/status-go/eth-node/crypto"
|
||||
"github.com/status-im/status-go/eth-node/crypto/ecies"
|
||||
)
|
||||
|
||||
// sskLen is the shared secret key length in bytes, used for both KDF output
// sizes in ECIES key agreement.
const sskLen = 16

// Secret is a negotiated shared secret together with the peer identity
// (uncompressed public key) it was derived with.
type Secret struct {
	Identity *ecdsa.PublicKey
	Key      []byte
}

// SharedSecret generates and manages negotiated secrets.
// Identities (public keys) stored by SharedSecret
// are compressed.
// TODO: make compression of public keys a responsibility of sqlitePersistence instead of SharedSecret.
type SharedSecret struct {
	persistence *sqlitePersistence
	logger      *zap.Logger
}
|
||||
|
||||
func New(db *sql.DB, logger *zap.Logger) *SharedSecret {
|
||||
if logger == nil {
|
||||
logger = zap.NewNop()
|
||||
}
|
||||
|
||||
return &SharedSecret{
|
||||
persistence: newSQLitePersistence(db),
|
||||
logger: logger.With(zap.Namespace("SharedSecret")),
|
||||
}
|
||||
}
|
||||
|
||||
func (s *SharedSecret) generate(myPrivateKey *ecdsa.PrivateKey, theirPublicKey *ecdsa.PublicKey, installationID string) (*Secret, error) {
|
||||
sharedKey, err := ecies.ImportECDSA(myPrivateKey).GenerateShared(
|
||||
ecies.ImportECDSAPublic(theirPublicKey),
|
||||
sskLen,
|
||||
sskLen,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
theirIdentity := crypto.CompressPubkey(theirPublicKey)
|
||||
if err = s.persistence.Add(theirIdentity, sharedKey, installationID); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &Secret{Key: sharedKey, Identity: theirPublicKey}, err
|
||||
}
|
||||
|
||||
// Generate will generate a shared secret for a given identity, and return it.
// The secret is also persisted under installationID; see generate.
func (s *SharedSecret) Generate(myPrivateKey *ecdsa.PrivateKey, theirPublicKey *ecdsa.PublicKey, installationID string) (*Secret, error) {
	return s.generate(myPrivateKey, theirPublicKey, installationID)
}
|
||||
|
||||
// Agreed returns true if a secret has been acknowledged by all the installationIDs.
// It first (re)generates and persists the secret under myInstallationID, then
// checks the stored record against every ID in theirInstallationIDs.
// It returns (secret, false, nil) when no IDs were supplied or some ID has
// not yet acknowledged, and an error if the stored secret disagrees with the
// freshly computed one.
func (s *SharedSecret) Agreed(myPrivateKey *ecdsa.PrivateKey, myInstallationID string, theirPublicKey *ecdsa.PublicKey, theirInstallationIDs []string) (*Secret, bool, error) {
	secret, err := s.generate(myPrivateKey, theirPublicKey, myInstallationID)
	if err != nil {
		return nil, false, err
	}

	if len(theirInstallationIDs) == 0 {
		return secret, false, nil
	}

	theirIdentity := crypto.CompressPubkey(theirPublicKey)
	response, err := s.persistence.Get(theirIdentity, theirInstallationIDs)
	if err != nil {
		return nil, false, err
	}

	// Every installation must have a recorded entry for the secret to count
	// as agreed.
	for _, installationID := range theirInstallationIDs {
		if !response.installationIDs[installationID] {
			return secret, false, nil
		}
	}

	// Sanity check: the freshly computed key must match what was stored.
	if !bytes.Equal(secret.Key, response.secret) {
		return nil, false, errors.New("computed and saved secrets are different for a given identity")
	}

	return secret, true, nil
}
|
||||
|
||||
func (s *SharedSecret) All() ([]*Secret, error) {
|
||||
var secrets []*Secret
|
||||
tuples, err := s.persistence.All()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
for _, tuple := range tuples {
|
||||
key, err := crypto.DecompressPubkey(tuple[0])
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
secrets = append(secrets, &Secret{Identity: key, Key: tuple[1]})
|
||||
}
|
||||
|
||||
return secrets, nil
|
||||
}
|
||||
249
vendor/github.com/status-im/status-go/protocol/encryption/x3dh.go
generated
vendored
Normal file
249
vendor/github.com/status-im/status-go/protocol/encryption/x3dh.go
generated
vendored
Normal file
@@ -0,0 +1,249 @@
|
||||
package encryption
|
||||
|
||||
import (
|
||||
"crypto/ecdsa"
|
||||
"errors"
|
||||
"sort"
|
||||
"strconv"
|
||||
"time"
|
||||
|
||||
"github.com/status-im/status-go/eth-node/crypto"
|
||||
"github.com/status-im/status-go/eth-node/crypto/ecies"
|
||||
)
|
||||
|
||||
const (
	// sskLen is the shared secret key length in bytes, used for both the
	// KDF and MAC sizes passed to ecies GenerateShared.
	sskLen = 16
)
|
||||
|
||||
func buildSignatureMaterial(bundle *Bundle) []byte {
|
||||
signedPreKeys := bundle.GetSignedPreKeys()
|
||||
timestamp := bundle.GetTimestamp()
|
||||
var keys []string
|
||||
|
||||
for k := range signedPreKeys {
|
||||
keys = append(keys, k)
|
||||
}
|
||||
var signatureMaterial []byte
|
||||
|
||||
sort.Strings(keys)
|
||||
|
||||
for _, installationID := range keys {
|
||||
signedPreKey := signedPreKeys[installationID]
|
||||
signatureMaterial = append(signatureMaterial, []byte(installationID)...)
|
||||
signatureMaterial = append(signatureMaterial, signedPreKey.SignedPreKey...)
|
||||
signatureMaterial = append(signatureMaterial, []byte(strconv.FormatUint(uint64(signedPreKey.Version), 10))...)
|
||||
// We don't use timestamp in the signature if it's 0, for backward compatibility
|
||||
}
|
||||
|
||||
if timestamp != 0 {
|
||||
signatureMaterial = append(signatureMaterial, []byte(strconv.FormatInt(timestamp, 10))...)
|
||||
}
|
||||
|
||||
return signatureMaterial
|
||||
|
||||
}
|
||||
|
||||
// SignBundle signs the bundle and refreshes the timestamps
|
||||
func SignBundle(identity *ecdsa.PrivateKey, bundleContainer *BundleContainer) error {
|
||||
bundleContainer.Bundle.Timestamp = time.Now().UnixNano()
|
||||
signatureMaterial := buildSignatureMaterial(bundleContainer.GetBundle())
|
||||
|
||||
signature, err := crypto.Sign(crypto.Keccak256(signatureMaterial), identity)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
bundleContainer.Bundle.Signature = signature
|
||||
return nil
|
||||
}
|
||||
|
||||
// NewBundleContainer creates a new BundleContainer from an identity private key
|
||||
func NewBundleContainer(identity *ecdsa.PrivateKey, installationID string) (*BundleContainer, error) {
|
||||
preKey, err := crypto.GenerateKey()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
compressedPreKey := crypto.CompressPubkey(&preKey.PublicKey)
|
||||
compressedIdentityKey := crypto.CompressPubkey(&identity.PublicKey)
|
||||
|
||||
encodedPreKey := crypto.FromECDSA(preKey)
|
||||
signedPreKeys := make(map[string]*SignedPreKey)
|
||||
signedPreKeys[installationID] = &SignedPreKey{
|
||||
ProtocolVersion: protocolVersion,
|
||||
SignedPreKey: compressedPreKey,
|
||||
}
|
||||
|
||||
bundle := Bundle{
|
||||
Timestamp: time.Now().UnixNano(),
|
||||
Identity: compressedIdentityKey,
|
||||
SignedPreKeys: signedPreKeys,
|
||||
}
|
||||
|
||||
return &BundleContainer{
|
||||
Bundle: &bundle,
|
||||
PrivateSignedPreKey: encodedPreKey,
|
||||
}, nil
|
||||
}
|
||||
|
||||
// VerifyBundle checks that a bundle is valid
//
// Validity here means the bundle's signature recovers to its advertised
// identity key; the recovered key itself is discarded.
func VerifyBundle(bundle *Bundle) error {
	_, err := ExtractIdentity(bundle)
	return err
}
|
||||
|
||||
// ExtractIdentity extracts the identity key from a given bundle
|
||||
func ExtractIdentity(bundle *Bundle) (*ecdsa.PublicKey, error) {
|
||||
bundleIdentityKey, err := crypto.DecompressPubkey(bundle.GetIdentity())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
signatureMaterial := buildSignatureMaterial(bundle)
|
||||
|
||||
recoveredKey, err := crypto.SigToPub(
|
||||
crypto.Keccak256(signatureMaterial),
|
||||
bundle.GetSignature(),
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if crypto.PubkeyToAddress(*recoveredKey) != crypto.PubkeyToAddress(*bundleIdentityKey) {
|
||||
return nil, errors.New("identity key and signature mismatch")
|
||||
}
|
||||
|
||||
return recoveredKey, nil
|
||||
}
|
||||
|
||||
// PerformDH generates a shared key given a private and a public key
//
// sskLen is used for both the KDF and MAC lengths of the ECIES derivation.
func PerformDH(privateKey *ecies.PrivateKey, publicKey *ecies.PublicKey) ([]byte, error) {
	return privateKey.GenerateShared(
		publicKey,
		sskLen,
		sskLen,
	)
}
|
||||
|
||||
func getSharedSecret(dh1 []byte, dh2 []byte, dh3 []byte) []byte {
|
||||
secretInput := append(append(dh1, dh2...), dh3...)
|
||||
|
||||
return crypto.Keccak256(secretInput)
|
||||
}
|
||||
|
||||
// x3dhActive handles initiating an X3DH session
|
||||
func x3dhActive(
|
||||
myIdentityKey *ecies.PrivateKey,
|
||||
theirSignedPreKey *ecies.PublicKey,
|
||||
myEphemeralKey *ecies.PrivateKey,
|
||||
theirIdentityKey *ecies.PublicKey,
|
||||
) ([]byte, error) {
|
||||
var dh1, dh2, dh3 []byte
|
||||
var err error
|
||||
|
||||
if dh1, err = PerformDH(myIdentityKey, theirSignedPreKey); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if dh2, err = PerformDH(myEphemeralKey, theirIdentityKey); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if dh3, err = PerformDH(myEphemeralKey, theirSignedPreKey); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return getSharedSecret(dh1, dh2, dh3), nil
|
||||
}
|
||||
|
||||
// x3dhPassive handles the response to an initiated X3DH session
|
||||
func x3dhPassive(
|
||||
theirIdentityKey *ecies.PublicKey,
|
||||
mySignedPreKey *ecies.PrivateKey,
|
||||
theirEphemeralKey *ecies.PublicKey,
|
||||
myIdentityKey *ecies.PrivateKey,
|
||||
) ([]byte, error) {
|
||||
var dh1, dh2, dh3 []byte
|
||||
var err error
|
||||
|
||||
if dh1, err = PerformDH(mySignedPreKey, theirIdentityKey); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if dh2, err = PerformDH(myIdentityKey, theirEphemeralKey); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if dh3, err = PerformDH(mySignedPreKey, theirEphemeralKey); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return getSharedSecret(dh1, dh2, dh3), nil
|
||||
}
|
||||
|
||||
// PerformActiveDH performs a Diffie-Hellman exchange using a public key and a generated ephemeral key.
|
||||
// Returns the key resulting from the DH exchange as well as the ephemeral public key.
|
||||
func PerformActiveDH(publicKey *ecdsa.PublicKey) ([]byte, *ecdsa.PublicKey, error) {
|
||||
ephemeralKey, err := crypto.GenerateKey()
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
key, err := PerformDH(
|
||||
ecies.ImportECDSA(ephemeralKey),
|
||||
ecies.ImportECDSAPublic(publicKey),
|
||||
)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
return key, &ephemeralKey.PublicKey, err
|
||||
}
|
||||
|
||||
// PerformActiveX3DH takes someone else's bundle and calculates shared secret.
|
||||
// Returns the shared secret and the ephemeral key used.
|
||||
func PerformActiveX3DH(identity []byte, signedPreKey []byte, prv *ecdsa.PrivateKey) ([]byte, *ecdsa.PublicKey, error) {
|
||||
bundleIdentityKey, err := crypto.DecompressPubkey(identity)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
bundleSignedPreKey, err := crypto.DecompressPubkey(signedPreKey)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
ephemeralKey, err := crypto.GenerateKey()
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
sharedSecret, err := x3dhActive(
|
||||
ecies.ImportECDSA(prv),
|
||||
ecies.ImportECDSAPublic(bundleSignedPreKey),
|
||||
ecies.ImportECDSA(ephemeralKey),
|
||||
ecies.ImportECDSAPublic(bundleIdentityKey),
|
||||
)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
return sharedSecret, &ephemeralKey.PublicKey, nil
|
||||
}
|
||||
|
||||
// PerformPassiveX3DH handles the part of the protocol where
|
||||
// our interlocutor used our bundle, with ID of the signedPreKey,
|
||||
// we loaded our identity key and the correct signedPreKey and we perform X3DH
|
||||
func PerformPassiveX3DH(theirIdentityKey *ecdsa.PublicKey, mySignedPreKey *ecdsa.PrivateKey, theirEphemeralKey *ecdsa.PublicKey, myPrivateKey *ecdsa.PrivateKey) ([]byte, error) {
|
||||
sharedSecret, err := x3dhPassive(
|
||||
ecies.ImportECDSAPublic(theirIdentityKey),
|
||||
ecies.ImportECDSA(mySignedPreKey),
|
||||
ecies.ImportECDSAPublic(theirEphemeralKey),
|
||||
ecies.ImportECDSA(myPrivateKey),
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return sharedSecret, nil
|
||||
}
|
||||
8
vendor/github.com/status-im/status-go/protocol/ens/const.go
generated
vendored
Normal file
8
vendor/github.com/status-im/status-go/protocol/ens/const.go
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
package ens
|
||||
|
||||
// maxRetries is the maximum number of attempts we do before giving up
// verifying an ENS record.
const maxRetries uint64 = 11

// ENSBackoffTimeSec is the step of the exponential backoff
// we retry roughly for 17 hours after receiving the message 2^11 * 30
const ENSBackoffTimeSec uint64 = 30
|
||||
119
vendor/github.com/status-im/status-go/protocol/ens/persistence.go
generated
vendored
Normal file
119
vendor/github.com/status-im/status-go/protocol/ens/persistence.go
generated
vendored
Normal file
@@ -0,0 +1,119 @@
|
||||
package ens
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"errors"
|
||||
)
|
||||
|
||||
// Persistence stores and retrieves ENS verification records in a SQL database.
type Persistence struct {
	db *sql.DB // underlying connection, owned by the caller
}

// NewPersistence wraps db in a Persistence.
func NewPersistence(db *sql.DB) *Persistence {
	return &Persistence{db: db}
}
|
||||
|
||||
func (p *Persistence) GetENSToBeVerified(now uint64) ([]*VerificationRecord, error) {
|
||||
rows, err := p.db.Query(`SELECT public_key, name, verified, verified_at, clock, verification_retries, next_retry FROM ens_verification_records WHERE NOT(verified) AND verification_retries < ? AND next_retry <= ?`, maxRetries, now)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var records []*VerificationRecord
|
||||
for rows.Next() {
|
||||
var record VerificationRecord
|
||||
err := rows.Scan(&record.PublicKey, &record.Name, &record.Verified, &record.VerifiedAt, &record.Clock, &record.VerificationRetries, &record.NextRetry)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
records = append(records, &record)
|
||||
}
|
||||
|
||||
return records, nil
|
||||
}
|
||||
|
||||
func (p *Persistence) UpdateRecords(records []*VerificationRecord) (err error) {
|
||||
var tx *sql.Tx
|
||||
tx, err = p.db.BeginTx(context.Background(), &sql.TxOptions{})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
defer func() {
|
||||
if err == nil {
|
||||
err = tx.Commit()
|
||||
return
|
||||
}
|
||||
// don't shadow original error
|
||||
_ = tx.Rollback()
|
||||
}()
|
||||
|
||||
for _, record := range records {
|
||||
var stmt *sql.Stmt
|
||||
stmt, err = tx.Prepare(`UPDATE ens_verification_records SET verified = ?, verified_at = ?, verification_retries = ?, next_retry = ? WHERE public_key = ?`)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer stmt.Close()
|
||||
_, err = stmt.Exec(record.Verified, record.VerifiedAt, record.VerificationRetries, record.NextRetry, record.PublicKey)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// AddRecord adds a record or return the latest available if already in the database and
// hasn't changed
//
// On insert the returned response is nil; when the stored row is at least as
// new (clock) or carries the same name, the stored row is returned instead.
func (p *Persistence) AddRecord(record VerificationRecord) (response *VerificationRecord, err error) {
	if !record.Valid() {
		err = errors.New("invalid ens record")
		return
	}
	var tx *sql.Tx
	tx, err = p.db.BeginTx(context.Background(), &sql.TxOptions{})
	if err != nil {
		return
	}

	// The named return err drives the commit/rollback decision below.
	defer func() {
		if err == nil {
			err = tx.Commit()
			return
		}
		// don't shadow original error
		_ = tx.Rollback()
	}()

	dbRecord := &VerificationRecord{PublicKey: record.PublicKey}

	err = tx.QueryRow(`SELECT name, clock, verified FROM ens_verification_records WHERE public_key = ?`, record.PublicKey).Scan(&dbRecord.Name, &dbRecord.Clock, &dbRecord.Verified)
	// sql.ErrNoRows is tolerated: dbRecord then keeps its zero values.
	if err != nil && err != sql.ErrNoRows {
		return
	}

	// NOTE(review): if Scan returned sql.ErrNoRows, err is still non-nil
	// here; for records passing Valid() (non-empty name, clock > 0) the
	// zero-valued dbRecord cannot satisfy this condition, so err is always
	// overwritten by the Exec below before the deferred commit reads it —
	// confirm Valid() stays a precondition if this logic changes.
	if dbRecord.Clock >= record.Clock || dbRecord.Name == record.Name {
		response = dbRecord
		return
	}

	_, err = tx.Exec(`INSERT INTO ens_verification_records(public_key, name, clock) VALUES (?,?,?)`, record.PublicKey, record.Name, record.Clock)
	return
}
|
||||
|
||||
func (p *Persistence) GetVerifiedRecord(publicKey string) (*VerificationRecord, error) {
|
||||
record := &VerificationRecord{}
|
||||
err := p.db.QueryRow(`SELECT name, clock FROM ens_verification_records WHERE verified AND public_key = ?`, publicKey).Scan(&record.Name, &record.Clock)
|
||||
switch err {
|
||||
case sql.ErrNoRows:
|
||||
return nil, nil
|
||||
case nil:
|
||||
return record, nil
|
||||
}
|
||||
|
||||
return nil, err
|
||||
|
||||
}
|
||||
25
vendor/github.com/status-im/status-go/protocol/ens/record.go
generated
vendored
Normal file
25
vendor/github.com/status-im/status-go/protocol/ens/record.go
generated
vendored
Normal file
@@ -0,0 +1,25 @@
|
||||
package ens
|
||||
|
||||
import (
|
||||
"math"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// VerificationRecord tracks the verification state of one
// public-key/ENS-name association.
type VerificationRecord struct {
	PublicKey           string // "0x"-prefixed hex public key (the prefix is stripped when querying the contract)
	Name                string // claimed ENS name; Valid() requires a ".eth" suffix
	Clock               uint64 // logical clock of the claim; AddRecord keeps the newer one
	Verified            bool   // whether the name resolved to the key
	VerifiedAt          uint64 // unix seconds of the last verification attempt
	VerificationRetries uint64 // failed attempts so far; drives the backoff
	NextRetry           uint64 // unix seconds before which no retry is attempted
}
|
||||
|
||||
// We calculate if it's too early to retry, by exponentially backing off
//
// next retry = last attempt time + ENSBackoffTimeSec * 2^retries.
// NOTE(review): the float Exp2 round-trip is exact for the retry counts
// allowed by maxRetries, but the uint64 conversion would misbehave for very
// large retry counts — confirm retries stay capped upstream.
func (e *VerificationRecord) CalculateNextRetry() {
	e.NextRetry = e.VerifiedAt + ENSBackoffTimeSec*uint64(math.Exp2(float64(e.VerificationRetries)))
}
|
||||
|
||||
func (e *VerificationRecord) Valid() bool {
|
||||
return e.Name != "" && strings.HasSuffix(e.Name, ".eth") && e.Clock > 0
|
||||
}
|
||||
204
vendor/github.com/status-im/status-go/protocol/ens/verifier.go
generated
vendored
Normal file
204
vendor/github.com/status-im/status-go/protocol/ens/verifier.go
generated
vendored
Normal file
@@ -0,0 +1,204 @@
|
||||
package ens
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
"time"
|
||||
|
||||
"go.uber.org/zap"
|
||||
|
||||
gethcommon "github.com/ethereum/go-ethereum/common"
|
||||
"github.com/status-im/status-go/eth-node/types"
|
||||
enstypes "github.com/status-im/status-go/eth-node/types/ens"
|
||||
"github.com/status-im/status-go/protocol/common"
|
||||
)
|
||||
|
||||
// Verifier periodically checks claimed ENS names against the registry
// contract and notifies subscribers of verification-state changes.
type Verifier struct {
	node            types.Node
	online          bool // toggled by SetOnline; gates the verify loop
	persistence     *Persistence
	logger          *zap.Logger
	timesource      common.TimeSource
	subscriptions   []chan []*VerificationRecord // receivers of record-update batches
	rpcEndpoint     string                       // Ethereum RPC endpoint used for lookups
	contractAddress string                       // ENS registry contract address
	quit            chan struct{} // closed by Stop to terminate verifyLoop
}
|
||||
|
||||
func New(node types.Node, logger *zap.Logger, timesource common.TimeSource, db *sql.DB, rpcEndpoint, contractAddress string) *Verifier {
|
||||
persistence := NewPersistence(db)
|
||||
return &Verifier{
|
||||
node: node,
|
||||
logger: logger,
|
||||
persistence: persistence,
|
||||
timesource: timesource,
|
||||
rpcEndpoint: rpcEndpoint,
|
||||
contractAddress: contractAddress,
|
||||
quit: make(chan struct{}),
|
||||
}
|
||||
}
|
||||
|
||||
// Start launches the background verification loop. It never fails; the
// error return exists to satisfy the caller's start/stop convention.
func (v *Verifier) Start() error {
	go v.verifyLoop()
	return nil
}
|
||||
|
||||
// Stop terminates the verification loop by closing the quit channel.
// NOTE(review): calling Stop twice would panic on the double close —
// confirm callers invoke it at most once.
func (v *Verifier) Stop() error {
	close(v.quit)

	return nil
}
|
||||
|
||||
// ENSVerified adds an already verified entry to the ens table
|
||||
func (v *Verifier) ENSVerified(pk, ensName string, clock uint64) error {
|
||||
|
||||
// Add returns nil if no record was available
|
||||
oldRecord, err := v.Add(pk, ensName, clock)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
var record *VerificationRecord
|
||||
|
||||
if oldRecord != nil {
|
||||
record = oldRecord
|
||||
} else {
|
||||
record = &VerificationRecord{PublicKey: pk, Name: ensName, Clock: clock}
|
||||
}
|
||||
|
||||
record.VerifiedAt = clock
|
||||
record.Verified = true
|
||||
records := []*VerificationRecord{record}
|
||||
err = v.persistence.UpdateRecords(records)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
v.publish(records)
|
||||
return nil
|
||||
}
|
||||
|
||||
// GetVerifiedRecord returns the verified record for pk, delegating to the
// persistence layer (nil when none exists).
func (v *Verifier) GetVerifiedRecord(pk string) (*VerificationRecord, error) {
	return v.persistence.GetVerifiedRecord(pk)
}
|
||||
|
||||
// Add stores a new public-key/name claim, returning the previously stored
// record when one already exists (nil otherwise) — see Persistence.AddRecord.
func (v *Verifier) Add(pk, ensName string, clock uint64) (*VerificationRecord, error) {
	record := VerificationRecord{PublicKey: pk, Name: ensName, Clock: clock}
	return v.persistence.AddRecord(record)
}
|
||||
|
||||
// SetOnline toggles whether the periodic verification loop performs work.
// NOTE(review): v.online is written here and read by verifyLoop's goroutine
// without synchronization — formally a data race; consider atomic.Bool.
func (v *Verifier) SetOnline(online bool) {
	v.online = online
}
|
||||
|
||||
func (v *Verifier) verifyLoop() {
|
||||
|
||||
ticker := time.NewTicker(30 * time.Second)
|
||||
for {
|
||||
select {
|
||||
|
||||
case <-v.quit:
|
||||
ticker.Stop()
|
||||
return
|
||||
case <-ticker.C:
|
||||
if !v.online || v.rpcEndpoint == "" || v.contractAddress == "" {
|
||||
continue
|
||||
}
|
||||
err := v.verify(v.rpcEndpoint, v.contractAddress)
|
||||
if err != nil {
|
||||
v.logger.Error("verify loop failed", zap.Error(err))
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Subscribe returns a channel on which batches of updated verification
// records are published.
// NOTE(review): the channel is unbuffered, yet publish() drops batches when
// a non-blocking send fails — subscribers must be actively receiving or
// they will miss updates; confirm this drop-on-slow behavior is intended.
func (v *Verifier) Subscribe() chan []*VerificationRecord {
	c := make(chan []*VerificationRecord)
	v.subscriptions = append(v.subscriptions, c)
	return c
}
|
||||
|
||||
func (v *Verifier) publish(records []*VerificationRecord) {
|
||||
v.logger.Info("publishing records", zap.Any("records", records))
|
||||
// Publish on channels, drop if buffer is full
|
||||
for _, s := range v.subscriptions {
|
||||
select {
|
||||
case s <- records:
|
||||
default:
|
||||
v.logger.Warn("ens subscription channel full, dropping message")
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
// ReverseResolve looks up the ENS name registered for address, using the
// node's ENS verifier against the configured RPC endpoint.
func (v *Verifier) ReverseResolve(address gethcommon.Address) (string, error) {
	verifier := v.node.NewENSVerifier(v.logger)
	return verifier.ReverseResolve(address, v.rpcEndpoint)
}
|
||||
|
||||
// Verify verifies that a registered ENS name matches the expected public key
|
||||
func (v *Verifier) verify(rpcEndpoint, contractAddress string) error {
|
||||
v.logger.Debug("verifying ENS Names", zap.String("endpoint", rpcEndpoint))
|
||||
verifier := v.node.NewENSVerifier(v.logger)
|
||||
|
||||
var ensDetails []enstypes.ENSDetails
|
||||
|
||||
// Now in seconds
|
||||
now := v.timesource.GetCurrentTime() / 1000
|
||||
ensToBeVerified, err := v.persistence.GetENSToBeVerified(now)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
recordsMap := make(map[string]*VerificationRecord)
|
||||
|
||||
for _, r := range ensToBeVerified {
|
||||
recordsMap[r.PublicKey] = r
|
||||
ensDetails = append(ensDetails, enstypes.ENSDetails{
|
||||
PublicKeyString: r.PublicKey[2:],
|
||||
Name: r.Name,
|
||||
})
|
||||
v.logger.Debug("verifying ens name", zap.Any("record", r))
|
||||
}
|
||||
|
||||
ensResponse, err := verifier.CheckBatch(ensDetails, rpcEndpoint, contractAddress)
|
||||
if err != nil {
|
||||
v.logger.Error("failed to check batch", zap.Error(err))
|
||||
return err
|
||||
}
|
||||
|
||||
var records []*VerificationRecord
|
||||
|
||||
for _, details := range ensResponse {
|
||||
pk := "0x" + details.PublicKeyString
|
||||
record := recordsMap[pk]
|
||||
|
||||
if details.Error == nil {
|
||||
record.Verified = details.Verified
|
||||
if !record.Verified {
|
||||
record.VerificationRetries++
|
||||
}
|
||||
} else {
|
||||
v.logger.Warn("Failed to resolve ens name",
|
||||
zap.String("name", details.Name),
|
||||
zap.String("publicKey", details.PublicKeyString),
|
||||
zap.Error(details.Error),
|
||||
)
|
||||
record.VerificationRetries++
|
||||
}
|
||||
record.VerifiedAt = now
|
||||
record.CalculateNextRetry()
|
||||
|
||||
records = append(records, record)
|
||||
}
|
||||
|
||||
err = v.persistence.UpdateRecords(records)
|
||||
if err != nil {
|
||||
|
||||
v.logger.Error("failed to update records", zap.Error(err))
|
||||
return err
|
||||
}
|
||||
|
||||
v.publish(records)
|
||||
|
||||
return nil
|
||||
}
|
||||
14
vendor/github.com/status-im/status-go/protocol/errors.go
generated
vendored
Normal file
14
vendor/github.com/status-im/status-go/protocol/errors.go
generated
vendored
Normal file
@@ -0,0 +1,14 @@
|
||||
package protocol
|
||||
|
||||
import (
|
||||
"github.com/pkg/errors"
|
||||
)
|
||||
|
||||
// Common sentinel errors returned by the protocol API; compare with
// errors.Is.
var (
	ErrChatIDEmpty      = errors.New("chat ID is empty")
	ErrChatNotFound     = errors.New("can't find chat")
	ErrNotImplemented   = errors.New("not implemented")
	ErrContactNotFound  = errors.New("contact not found")
	ErrCommunityIDEmpty = errors.New("community ID is empty")
	ErrUserNotMember    = errors.New("user not a member")
)
|
||||
64
vendor/github.com/status-im/status-go/protocol/group_chat_invitation.go
generated
vendored
Normal file
64
vendor/github.com/status-im/status-go/protocol/group_chat_invitation.go
generated
vendored
Normal file
@@ -0,0 +1,64 @@
|
||||
package protocol
|
||||
|
||||
import (
|
||||
"crypto/ecdsa"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
|
||||
"github.com/golang/protobuf/proto"
|
||||
|
||||
"github.com/status-im/status-go/eth-node/crypto"
|
||||
"github.com/status-im/status-go/eth-node/types"
|
||||
"github.com/status-im/status-go/protocol/protobuf"
|
||||
)
|
||||
|
||||
// GroupChatInvitation represents a group chat invitation request from a user
// in the application layer, used for persistence, querying and signaling.
type GroupChatInvitation struct {
	*protobuf.GroupChatInvitation

	// From is a public key of the author of the invitation request.
	From string `json:"from,omitempty"`

	// SigPubKey is the ecdsa encoded public key of the invitation author
	SigPubKey *ecdsa.PublicKey `json:"-"`
}
|
||||
|
||||
// NewGroupChatInvitation returns an invitation wrapping an empty protobuf
// payload, ready to be populated.
func NewGroupChatInvitation() *GroupChatInvitation {
	return &GroupChatInvitation{GroupChatInvitation: &protobuf.GroupChatInvitation{}}
}
|
||||
|
||||
// ID is the Keccak256() contatenation of From-ChatId
|
||||
func (g *GroupChatInvitation) ID() string {
|
||||
return types.EncodeHex(crypto.Keccak256([]byte(fmt.Sprintf("%s%s", g.From, g.ChatId))))
|
||||
}
|
||||
|
||||
// GetSigPubKey returns an ecdsa encoded public key
// this function is required to implement the ChatEntity interface
func (g *GroupChatInvitation) GetSigPubKey() *ecdsa.PublicKey {
	return g.SigPubKey
}
|
||||
|
||||
// GetProtobuf returns the struct's embedded protobuf struct
// this function is required to implement the ChatEntity interface
func (g *GroupChatInvitation) GetProtobuf() proto.Message {
	return g.GroupChatInvitation
}
|
||||
|
||||
func (g *GroupChatInvitation) MarshalJSON() ([]byte, error) {
|
||||
item := struct {
|
||||
ID string `json:"id"`
|
||||
ChatID string `json:"chatId,omitempty"`
|
||||
From string `json:"from"`
|
||||
IntroductionMessage string `json:"introductionMessage,omitempty"`
|
||||
State protobuf.GroupChatInvitation_State `json:"state,omitempty"`
|
||||
}{
|
||||
ID: g.ID(),
|
||||
ChatID: g.ChatId,
|
||||
From: g.From,
|
||||
IntroductionMessage: g.IntroductionMessage,
|
||||
State: g.State,
|
||||
}
|
||||
|
||||
return json.Marshal(item)
|
||||
}
|
||||
109
vendor/github.com/status-im/status-go/protocol/group_chat_system_messages.go
generated
vendored
Normal file
109
vendor/github.com/status-im/status-go/protocol/group_chat_system_messages.go
generated
vendored
Normal file
@@ -0,0 +1,109 @@
|
||||
package protocol
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/status-im/status-go/eth-node/crypto"
|
||||
"github.com/status-im/status-go/eth-node/types"
|
||||
"github.com/status-im/status-go/protocol/common"
|
||||
"github.com/status-im/status-go/protocol/protobuf"
|
||||
v1protocol "github.com/status-im/status-go/protocol/v1"
|
||||
)
|
||||
|
||||
// defaultSystemMessagesTranslations holds the English templates used to
// render group-membership events as system messages.
var defaultSystemMessagesTranslations = new(systemMessageTranslationsMap)

func init() {
	// Templates use {{placeholder}} tokens substituted by tsprintf.
	defaultSystemMessagesTranslationSet := map[protobuf.MembershipUpdateEvent_EventType]string{
		protobuf.MembershipUpdateEvent_CHAT_CREATED:   "{{from}} created the group {{name}}",
		protobuf.MembershipUpdateEvent_NAME_CHANGED:   "{{from}} changed the group's name to {{name}}",
		protobuf.MembershipUpdateEvent_MEMBERS_ADDED:  "{{from}} has added {{members}}",
		protobuf.MembershipUpdateEvent_ADMINS_ADDED:   "{{from}} has made {{members}} admin",
		protobuf.MembershipUpdateEvent_MEMBER_REMOVED: "{{member}} left the group",
		protobuf.MembershipUpdateEvent_ADMIN_REMOVED:  "{{member}} is not admin anymore",
		protobuf.MembershipUpdateEvent_COLOR_CHANGED:  "{{from}} changed the group's color",
		protobuf.MembershipUpdateEvent_IMAGE_CHANGED:  "{{from}} changed the group's image",
	}
	defaultSystemMessagesTranslations.Init(defaultSystemMessagesTranslationSet)
}
|
||||
|
||||
// tsprintf substitutes every "{{key}}" placeholder in format with the
// corresponding value from params.
func tsprintf(format string, params map[string]string) string {
	for name, value := range params {
		format = strings.ReplaceAll(format, "{{"+name+"}}", value)
	}
	return format
}
|
||||
|
||||
// eventToSystemMessage renders one membership-update event as a system chat
// message using the given translation templates. Event types with no case
// below produce a message with empty text.
func eventToSystemMessage(e v1protocol.MembershipUpdateEvent, translations *systemMessageTranslationsMap) *common.Message {
	var text string
	switch e.Type {
	case protobuf.MembershipUpdateEvent_CHAT_CREATED:
		message, _ := translations.Load(protobuf.MembershipUpdateEvent_CHAT_CREATED)
		text = tsprintf(message, map[string]string{"from": "@" + e.From, "name": e.Name})
	case protobuf.MembershipUpdateEvent_NAME_CHANGED:
		message, _ := translations.Load(protobuf.MembershipUpdateEvent_NAME_CHANGED)
		text = tsprintf(message, map[string]string{"from": "@" + e.From, "name": e.Name})
	case protobuf.MembershipUpdateEvent_COLOR_CHANGED:
		message, _ := translations.Load(protobuf.MembershipUpdateEvent_COLOR_CHANGED)
		text = tsprintf(message, map[string]string{"from": "@" + e.From})
	case protobuf.MembershipUpdateEvent_IMAGE_CHANGED:
		message, _ := translations.Load(protobuf.MembershipUpdateEvent_IMAGE_CHANGED)
		text = tsprintf(message, map[string]string{"from": "@" + e.From})
	case protobuf.MembershipUpdateEvent_MEMBERS_ADDED:
		// Mention every added member in a single comma-separated list.
		var memberMentions []string
		for _, s := range e.Members {
			memberMentions = append(memberMentions, "@"+s)
		}
		message, _ := translations.Load(protobuf.MembershipUpdateEvent_MEMBERS_ADDED)
		text = tsprintf(message, map[string]string{"from": "@" + e.From, "members": strings.Join(memberMentions, ", ")})
	case protobuf.MembershipUpdateEvent_ADMINS_ADDED:
		var memberMentions []string
		for _, s := range e.Members {
			memberMentions = append(memberMentions, "@"+s)
		}
		message, _ := translations.Load(protobuf.MembershipUpdateEvent_ADMINS_ADDED)
		text = tsprintf(message, map[string]string{"from": "@" + e.From, "members": strings.Join(memberMentions, ", ")})
	case protobuf.MembershipUpdateEvent_MEMBER_REMOVED:
		// NOTE(review): indexes e.Members[0] unconditionally — panics if a
		// removal event arrives with an empty Members list; confirm upstream
		// validation guarantees at least one member here.
		message, _ := translations.Load(protobuf.MembershipUpdateEvent_MEMBER_REMOVED)
		text = tsprintf(message, map[string]string{"member": "@" + e.Members[0]})
	case protobuf.MembershipUpdateEvent_ADMIN_REMOVED:
		message, _ := translations.Load(protobuf.MembershipUpdateEvent_ADMIN_REMOVED)
		text = tsprintf(message, map[string]string{"member": "@" + e.Members[0]})

	}
	timestamp := v1protocol.TimestampInMsFromTime(time.Now())
	// The message ID is derived from the event signature so the same event
	// always maps to the same system message.
	message := &common.Message{
		ChatMessage: &protobuf.ChatMessage{
			ChatId:      e.ChatID,
			Text:        text,
			MessageType: protobuf.MessageType_SYSTEM_MESSAGE_PRIVATE_GROUP,
			ContentType: protobuf.ChatMessage_SYSTEM_MESSAGE_CONTENT_PRIVATE_GROUP,
			Clock:       e.ClockValue,
			Timestamp:   timestamp,
		},
		From:             e.From,
		WhisperTimestamp: timestamp,
		LocalChatID:      e.ChatID,
		Seen:             true,
		ID:               types.EncodeHex(crypto.Keccak256(e.Signature)),
	}
	// We don't pass an identity here as system messages don't need the mentioned flag
	_ = message.PrepareContent("")
	return message
}
|
||||
|
||||
func buildSystemMessages(events []v1protocol.MembershipUpdateEvent, translations *systemMessageTranslationsMap) []*common.Message {
|
||||
var messages []*common.Message
|
||||
|
||||
for _, e := range events {
|
||||
if e.Type == protobuf.MembershipUpdateEvent_MEMBER_JOINED {
|
||||
// explicit join has been removed, ignore this event
|
||||
continue
|
||||
}
|
||||
|
||||
messages = append(messages, eventToSystemMessage(e, translations))
|
||||
}
|
||||
|
||||
return messages
|
||||
}
|
||||
3100
vendor/github.com/status-im/status-go/protocol/identity/alias/data.go
generated
vendored
Normal file
3100
vendor/github.com/status-im/status-go/protocol/identity/alias/data.go
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
27
vendor/github.com/status-im/status-go/protocol/identity/alias/flfsr.go
generated
vendored
Normal file
27
vendor/github.com/status-im/status-go/protocol/identity/alias/flfsr.go
generated
vendored
Normal file
@@ -0,0 +1,27 @@
|
||||
package alias
|
||||
|
||||
// LSFR is a linear-feedback shift register.
// For details: https://en.wikipedia.org/wiki/Linear-feedback_shift_register
type LSFR struct {
	data uint64 // current register state
	poly uint64 // feedback polynomial tap mask
}

// newLSFR returns a register seeded with seed and tapped by poly.
func newLSFR(poly uint64, seed uint64) *LSFR {
	return &LSFR{poly: poly, data: seed}
}

// next advances the register by one step and returns the new state.
func (f *LSFR) next() uint64 {
	// The feedback bit is the parity of the state bits selected by poly.
	var feedback uint64
	for i := uint64(0); i < 64; i++ {
		if f.poly&(1<<i) != 0 {
			feedback ^= f.data >> i
		}
	}
	feedback &= 0x01

	f.data = f.data<<1 | feedback
	return f.data
}
|
||||
45
vendor/github.com/status-im/status-go/protocol/identity/alias/generate.go
generated
vendored
Normal file
45
vendor/github.com/status-im/status-go/protocol/identity/alias/generate.go
generated
vendored
Normal file
@@ -0,0 +1,45 @@
|
||||
package alias
|
||||
|
||||
import (
|
||||
"crypto/ecdsa"
|
||||
"encoding/hex"
|
||||
"fmt"
|
||||
|
||||
"github.com/status-im/status-go/eth-node/crypto"
|
||||
)
|
||||
|
||||
const poly uint64 = 0xB8
|
||||
|
||||
func generate(seed uint64) string {
|
||||
generator := newLSFR(poly, seed)
|
||||
adjective1Index := generator.next() % uint64(len(adjectives))
|
||||
adjective2Index := generator.next() % uint64(len(adjectives))
|
||||
animalIndex := generator.next() % uint64(len(animals))
|
||||
adjective1 := adjectives[adjective1Index]
|
||||
adjective2 := adjectives[adjective2Index]
|
||||
animal := animals[animalIndex]
|
||||
|
||||
return fmt.Sprintf("%s %s %s", adjective1, adjective2, animal)
|
||||
}
|
||||
|
||||
// GenerateFromPublicKey returns the 3 words name given an *ecdsa.PublicKey
func GenerateFromPublicKey(publicKey *ecdsa.PublicKey) string {
	// Here we truncate the public key to the least significant 64 bits
	// NOTE(review): big.Int.Int64 is documented as undefined when X exceeds
	// an int64; in practice it yields the low 64 bits — confirm this is the
	// intended truncation and that it is stable across Go versions.
	return generate(uint64(publicKey.X.Int64()))
}
|
||||
|
||||
// GenerateFromPublicKeyString returns the 3 words name given a public key
// prefixed with 0x
func GenerateFromPublicKeyString(publicKeyString string) (string, error) {
	// Guard against inputs shorter than the "0x" prefix; slicing [2:] below
	// would otherwise panic with a slice-bounds error.
	if len(publicKeyString) < 2 {
		return "", fmt.Errorf("invalid pubkey: %s", publicKeyString)
	}

	// Strip the 0x prefix and decode the hex-encoded key bytes.
	publicKeyBytes, err := hex.DecodeString(publicKeyString[2:])
	if err != nil {
		return "", err
	}

	publicKey, err := crypto.UnmarshalPubkey(publicKeyBytes)
	if err != nil {
		return "", err
	}

	return GenerateFromPublicKey(publicKey), nil
}
|
||||
33
vendor/github.com/status-im/status-go/protocol/identity/alias/ops.go
generated
vendored
Normal file
33
vendor/github.com/status-im/status-go/protocol/identity/alias/ops.go
generated
vendored
Normal file
@@ -0,0 +1,33 @@
|
||||
package alias
|
||||
|
||||
import (
|
||||
"strings"
|
||||
)
|
||||
|
||||
func IsAdjective(val string) bool {
|
||||
for _, v := range adjectives {
|
||||
if v == val {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func IsAnimal(val string) bool {
|
||||
for _, v := range animals {
|
||||
if v == val {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// IsAlias reports whether the given string looks like a generated
// three-word alias: "Adjective Adjective Animal".
func IsAlias(alias string) bool {
	parts := strings.Fields(alias)
	if len(parts) != 3 {
		return false
	}
	// Normalise case before lookup (strings.Title is deprecated, but it is
	// kept here for exact behavioural parity with the word lists).
	return IsAdjective(strings.Title(parts[0])) &&
		IsAdjective(strings.Title(parts[1])) &&
		IsAnimal(strings.Title(parts[2]))
}
|
||||
74
vendor/github.com/status-im/status-go/protocol/identity/colorhash/colorhash.go
generated
vendored
Normal file
74
vendor/github.com/status-im/status-go/protocol/identity/colorhash/colorhash.go
generated
vendored
Normal file
@@ -0,0 +1,74 @@
|
||||
package colorhash
|
||||
|
||||
import (
|
||||
"math/big"
|
||||
|
||||
"github.com/status-im/status-go/multiaccounts"
|
||||
"github.com/status-im/status-go/protocol/identity"
|
||||
)
|
||||
|
||||
const (
	// colorHashSegmentMaxLen is the maximum run length of a single ring segment.
	colorHashSegmentMaxLen = 5
	// colorHashColorsCount is the size of the colour palette.
	colorHashColorsCount = 32
)

// colorHashAlphabet is built lazily on first use by GenerateFor.
// NOTE(review): this lazy initialisation is not synchronised — concurrent
// first calls to GenerateFor race on the variable; sync.Once would be safer.
var colorHashAlphabet [][]int

// GenerateFor produces the colour hash for the given public key (a numeric
// string, e.g. 0x-prefixed hex). The hash is derived from slice index 2 of
// the compressed key (the 10-byte colour-hash region, see identity.Slices).
func GenerateFor(pubkey string) (hash multiaccounts.ColorHash, err error) {
	if len(colorHashAlphabet) == 0 {
		colorHashAlphabet = makeColorHashAlphabet(colorHashSegmentMaxLen, colorHashColorsCount)
	}

	compressedKey, err := identity.ToCompressedKey(pubkey)
	if err != nil {
		return nil, err
	}

	slices, err := identity.Slices(compressedKey)
	if err != nil {
		return nil, err
	}

	return toColorHash(new(big.Int).SetBytes(slices[2]), &colorHashAlphabet, colorHashColorsCount), nil
}
|
||||
|
||||
// makeColorHashAlphabet enumerates every (length, colour) pair:
// [[1 0] [1 1] [1 2] ... [units colors-1]], where an entry such as [3 12]
// means a segment 3 units long drawn in colour index 12.
func makeColorHashAlphabet(units, colors int) [][]int {
	pairs := make([][]int, 0, units*colors)
	for length := 1; length <= units; length++ {
		for colour := 0; colour < colors; colour++ {
			pairs = append(pairs, []int{length, colour})
		}
	}
	return pairs
}
|
||||
|
||||
// toColorHash writes value in base len(*alphabet), maps each digit to its
// (length, colour) alphabet entry, then bumps any colour that repeats its
// predecessor to the next palette index.
func toColorHash(value *big.Int, alphabet *[][]int, colorsCount int) (hash multiaccounts.ColorHash) {
	digits := identity.ToBigBase(value, uint64(len(*alphabet)))
	hash = make(multiaccounts.ColorHash, len(digits))
	for i, d := range digits {
		entry := (*alphabet)[d]
		hash[i] = [2]int{entry[0], entry[1]}
	}

	// colors can't repeat themselves
	// this makes color hash not fully collision resistant
	for i := 1; i < len(hash); i++ {
		if hash[i][1] == hash[i-1][1] {
			hash[i][1] = (hash[i][1] + 1) % colorsCount
		}
	}

	return
}
|
||||
94
vendor/github.com/status-im/status-go/protocol/identity/emojihash/emojihash.go
generated
vendored
Normal file
94
vendor/github.com/status-im/status-go/protocol/identity/emojihash/emojihash.go
generated
vendored
Normal file
@@ -0,0 +1,94 @@
|
||||
package emojihash
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"errors"
|
||||
"math/big"
|
||||
"strings"
|
||||
|
||||
"github.com/status-im/status-go/protocol/identity"
|
||||
"github.com/status-im/status-go/static"
|
||||
)
|
||||
|
||||
const (
	emojiAlphabetLen = 2757 // 20bytes of data described by 14 emojis requires at least 2757 length alphabet
	emojiHashLen = 14
)

// emojisAlphabet is loaded lazily on first use by GenerateFor.
// NOTE(review): the lazy initialisation is not synchronised; concurrent first
// calls to GenerateFor race on this variable.
var emojisAlphabet []string

// GenerateFor returns the 14-emoji hash for the given public key (a numeric
// string), derived from slice index 1 of the compressed key (the 20-byte
// emoji-hash region, see identity.Slices).
func GenerateFor(pubkey string) ([]string, error) {
	if len(emojisAlphabet) == 0 {
		alphabet, err := loadAlphabet()
		if err != nil {
			return nil, err
		}
		emojisAlphabet = *alphabet
	}

	compressedKey, err := identity.ToCompressedKey(pubkey)
	if err != nil {
		return nil, err
	}

	slices, err := identity.Slices(compressedKey)
	if err != nil {
		return nil, err
	}

	return toEmojiHash(new(big.Int).SetBytes(slices[1]), emojiHashLen, &emojisAlphabet)
}
|
||||
|
||||
// loadAlphabet reads the emoji alphabet from the bundled emojis.txt asset,
// one emoji per line, truncated to emojiAlphabetLen entries.
func loadAlphabet() (*[]string, error) {
	data, err := static.Asset("emojis.txt")
	if err != nil {
		return nil, err
	}

	alphabet := make([]string, 0, emojiAlphabetLen)

	scanner := bufio.NewScanner(bytes.NewReader(data))
	for scanner.Scan() {
		alphabet = append(alphabet, strings.Replace(scanner.Text(), "\n", "", -1))
	}
	// Surface scan errors (e.g. a token exceeding the buffer) instead of
	// silently returning a truncated alphabet — previously never checked.
	if err := scanner.Err(); err != nil {
		return nil, err
	}

	// current alphabet contains more emojis than needed, just in case some emojis needs to be removed
	// make sure only necessary part is loaded
	if len(alphabet) > emojiAlphabetLen {
		alphabet = alphabet[:emojiAlphabetLen]
	}

	return &alphabet, nil
}
|
||||
|
||||
// toEmojiHash writes value in base len(*alphabet) and maps each digit to an
// emoji, left-padding with alphabet[0] up to hashLen digits. hashLen == 0
// means "use the natural length". It errors when len(alphabet)^hashLen cannot
// represent every value of this bit length.
func toEmojiHash(value *big.Int, hashLen int, alphabet *[]string) (hash []string, err error) {
	valueBitLen := value.BitLen()
	alphabetLen := new(big.Int).SetInt64(int64(len(*alphabet)))

	indexes := identity.ToBigBase(value, alphabetLen.Uint64())
	if hashLen == 0 {
		hashLen = len(indexes)
	} else if hashLen > len(indexes) {
		// Left-pad with zero digits in a single allocation; the previous
		// element-by-element prepend was quadratic in the pad length.
		padded := make([]uint64, hashLen)
		copy(padded[hashLen-len(indexes):], indexes)
		indexes = padded
	}

	// alphabetLen^hashLen
	possibleCombinations := new(big.Int).Exp(alphabetLen, new(big.Int).SetInt64(int64(hashLen)), nil)

	// 2^valueBitLen
	requiredCombinations := new(big.Int).Exp(new(big.Int).SetInt64(2), new(big.Int).SetInt64(int64(valueBitLen)), nil)

	if possibleCombinations.Cmp(requiredCombinations) == -1 {
		return nil, errors.New("alphabet or hash length is too short to encode given value")
	}

	hash = make([]string, 0, len(indexes))
	for _, v := range indexes {
		hash = append(hash, (*alphabet)[v])
	}

	return hash, nil
}
|
||||
61
vendor/github.com/status-im/status-go/protocol/identity/identicon/identicon.go
generated
vendored
Normal file
61
vendor/github.com/status-im/status-go/protocol/identity/identicon/identicon.go
generated
vendored
Normal file
@@ -0,0 +1,61 @@
|
||||
package identicon
|
||||
|
||||
import (
|
||||
"crypto/md5" // nolint: gosec
|
||||
"image/color"
|
||||
|
||||
"github.com/lucasb-eyer/go-colorful"
|
||||
)
|
||||
|
||||
const (
	// Fixed saturation and lightness for identicon colours; only the hue
	// varies with the input hash (see getColorFromHash).
	defaultSaturation = 0.5
	defaultLightness = 0.7
)

// Identicon is a 5x5 cell bitmap (1 = draw the cell, 0 = leave transparent)
// plus the single colour used for all drawn cells.
type Identicon struct {
	bitmap []byte
	color color.Color
}
|
||||
|
||||
// generate builds an Identicon (5x5 on/off bitmap plus colour) from the MD5
// digest of key. MD5 is used only as a deterministic spreader, not for
// security.
func generate(key string) Identicon {
	digest := md5.Sum([]byte(key)) // nolint: gosec
	return Identicon{
		bitmap: convertPatternToBinarySwitch(generatePatternFromHash(digest)),
		color:  getColorFromHash(digest),
	}
}
|
||||
|
||||
// getColorFromHash maps the last three digest bytes onto a hue in [0, 360]
// and returns an HSL colour with fixed saturation and lightness.
func getColorFromHash(h [16]byte) color.Color {
	// Take the last 3 relevant bytes, and convert to a float between [0..360]
	total := float64(h[13]) + float64(h[14]) + float64(h[15])
	hue := (total / 765) * 360
	return colorful.Hsl(hue, defaultSaturation, defaultLightness)
}
|
||||
|
||||
// generatePatternFromHash expands the 16-byte digest into a 25-byte, 5x5
// row-major pattern that is horizontally symmetric: column j mirrors
// column 4-j, so each row consumes only 3 digest bytes.
func generatePatternFromHash(sum [16]byte) []byte {
	pattern := make([]byte, 25)
	for row := 0; row < 5; row++ {
		for col := 0; col < 5; col++ {
			src := col
			if src > 2 {
				src = 4 - col
			}
			pattern[5*row+col] = sum[3*row+src]
		}
	}
	return pattern
}
|
||||
|
||||
// convertPatternToBinarySwitch maps each pattern byte to an on/off switch:
// even bytes become 1 (cell drawn), odd bytes become 0.
func convertPatternToBinarySwitch(pattern []byte) []byte {
	switches := make([]byte, 25)
	for i, v := range pattern {
		// (v+1)&1 is 1 exactly when v is even (byte overflow at v==255
		// still yields 0, matching the odd case).
		switches[i] = (v + 1) & 1
	}
	return switches
}
|
||||
73
vendor/github.com/status-im/status-go/protocol/identity/identicon/renderer.go
generated
vendored
Normal file
73
vendor/github.com/status-im/status-go/protocol/identity/identicon/renderer.go
generated
vendored
Normal file
@@ -0,0 +1,73 @@
|
||||
package identicon
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/base64"
|
||||
"image"
|
||||
"image/color"
|
||||
"image/draw"
|
||||
"image/png"
|
||||
)
|
||||
|
||||
const (
	// Width and Height are the pixel dimensions of a rendered identicon:
	// a 5x5 grid of 6px cells offset 10px from the origin (see drawRect).
	Width = 50
	Height = 50
)
|
||||
|
||||
// renderBase64 renders the identicon to PNG and returns it wrapped as a
// "data:image/png;base64," data URI.
func renderBase64(id Identicon) (string, error) {
	raw, err := render(id)
	if err != nil {
		return "", err
	}
	return "data:image/png;base64," + base64.StdEncoding.EncodeToString(raw), nil
}
|
||||
|
||||
// setBackgroundTransparent fills the whole image with fully transparent
// pixels (draw.Src replaces, rather than blends, the destination).
func setBackgroundTransparent(img *image.RGBA) {
	draw.Draw(img, img.Bounds(), &image.Uniform{C: color.Transparent}, image.Point{}, draw.Src)
}
|
||||
|
||||
func drawRect(rgba *image.RGBA, i int, c color.Color) {
|
||||
sizeSquare := 6
|
||||
maxRow := 5
|
||||
|
||||
r := image.Rect(
|
||||
10+(i%maxRow)*sizeSquare,
|
||||
10+(i/maxRow)*sizeSquare,
|
||||
10+(i%maxRow)*sizeSquare+sizeSquare,
|
||||
10+(i/maxRow)*sizeSquare+sizeSquare,
|
||||
)
|
||||
|
||||
draw.Draw(rgba, r, &image.Uniform{C: c}, image.Point{}, draw.Src)
|
||||
}
|
||||
|
||||
// render rasterises the identicon: a transparent background plus one coloured
// square for every switched-on bitmap cell, returned PNG-encoded.
func render(id Identicon) ([]byte, error) {
	canvas := image.NewRGBA(image.Rect(0, 0, Width, Height))
	setBackgroundTransparent(canvas)

	for i, on := range id.bitmap {
		if on == 1 {
			drawRect(canvas, i, id.color)
		}
	}

	var out bytes.Buffer
	if err := png.Encode(&out, canvas); err != nil {
		return nil, err
	}
	return out.Bytes(), nil
}
|
||||
|
||||
// GenerateBase64 generates an identicon in base64 png format given a string
// (the result is a ready-to-embed "data:image/png;base64,..." URI).
func GenerateBase64(id string) (string, error) {
	i := generate(id)
	return renderBase64(i)
}

// Generate renders the identicon for id and returns the raw PNG bytes.
func Generate(id string) ([]byte, error) {
	i := generate(id)
	return render(i)
}
|
||||
91
vendor/github.com/status-im/status-go/protocol/identity/ring/ring.go
generated
vendored
Normal file
91
vendor/github.com/status-im/status-go/protocol/identity/ring/ring.go
generated
vendored
Normal file
@@ -0,0 +1,91 @@
|
||||
package ring
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"image"
|
||||
"image/png"
|
||||
"math"
|
||||
|
||||
"github.com/fogleman/gg"
|
||||
|
||||
"github.com/status-im/status-go/multiaccounts"
|
||||
)
|
||||
|
||||
// Theme selects which identicon-ring colour palette DrawRing uses.
type Theme int

const (
	LightTheme Theme = 1
	DarkTheme Theme = 2
)

// NOTE(review): the light and dark palettes below are currently identical,
// element for element — confirm whether a distinct dark palette was intended.
var (
	lightThemeIdenticonRingColors = []string{
		"#000000", "#726F6F", "#C4C4C4", "#E7E7E7", "#FFFFFF", "#00FF00",
		"#009800", "#B8FFBB", "#FFC413", "#9F5947", "#FFFF00", "#A8AC00",
		"#FFFFB0", "#FF5733", "#FF0000", "#9A0000", "#FF9D9D", "#FF0099",
		"#C80078", "#FF00FF", "#900090", "#FFB0FF", "#9E00FF", "#0000FF",
		"#000086", "#9B81FF", "#3FAEF9", "#9A6600", "#00FFFF", "#008694",
		"#C2FFFF", "#00F0B6"}
	darkThemeIdenticonRingColors = []string{
		"#000000", "#726F6F", "#C4C4C4", "#E7E7E7", "#FFFFFF", "#00FF00",
		"#009800", "#B8FFBB", "#FFC413", "#9F5947", "#FFFF00", "#A8AC00",
		"#FFFFB0", "#FF5733", "#FF0000", "#9A0000", "#FF9D9D", "#FF0099",
		"#C80078", "#FF00FF", "#900090", "#FFB0FF", "#9E00FF", "#0000FF",
		"#000086", "#9B81FF", "#3FAEF9", "#9A6600", "#00FFFF", "#008694",
		"#C2FFFF", "#00F0B6"}
)

// DrawRingParam bundles the inputs for DrawRing: the theme palette, the
// colour hash (a sequence of [units, colourIdx] pairs), the base avatar image
// bytes, the output dimensions, and the stroke width of the ring.
type DrawRingParam struct {
	Theme Theme `json:"theme"`
	ColorHash multiaccounts.ColorHash `json:"colorHash"`
	ImageBytes []byte `json:"imageBytes"`
	Height int `json:"height"`
	Width int `json:"width"`
	RingWidth float64 `json:"ringWidth"`
}
|
||||
|
||||
// DrawRing decodes param.ImageBytes, strokes the colour-hash identicon ring
// over it as arc segments, and returns the composite PNG-encoded. Each
// ColorHash entry [units, colourIdx] covers units/total of the circumference
// in the palette colour at colourIdx.
func DrawRing(param *DrawRingParam) ([]byte, error) {
	var colors []string
	switch param.Theme {
	case LightTheme:
		colors = lightThemeIdenticonRingColors
	case DarkTheme:
		colors = darkThemeIdenticonRingColors
	default:
		return nil, fmt.Errorf("unknown theme")
	}

	dc := gg.NewContext(param.Width, param.Height)
	img, _, err := image.Decode(bytes.NewReader(param.ImageBytes))
	if err != nil {
		return nil, err
	}
	dc.DrawImage(img, 0, 0)

	// Centre the stroke: inset the radius by half the ring width.
	radius := (float64(param.Height) - param.RingWidth) / 2
	arcPos := 0.0

	// NOTE(review): an empty ColorHash makes totalRingUnits zero and
	// unitRadLen +Inf; the segment loop then never runs, so the image is
	// returned ring-less — confirm that is intended.
	totalRingUnits := 0
	for i := 0; i < len(param.ColorHash); i++ {
		totalRingUnits += param.ColorHash[i][0]
	}
	unitRadLen := 2 * math.Pi / float64(totalRingUnits)

	for i := 0; i < len(param.ColorHash); i++ {
		dc.SetHexColor(colors[param.ColorHash[i][1]])
		dc.DrawArc(float64(param.Width/2), float64(param.Height/2), radius, arcPos, arcPos+unitRadLen*float64(param.ColorHash[i][0]))
		dc.SetLineWidth(param.RingWidth)
		dc.SetLineCapButt()
		dc.Stroke()
		arcPos += unitRadLen * float64(param.ColorHash[i][0])
	}

	buf := new(bytes.Buffer)
	err = png.Encode(buf, dc.Image())
	if err != nil {
		return nil, err
	}

	return buf.Bytes(), nil
}
|
||||
95
vendor/github.com/status-im/status-go/protocol/identity/social_links.go
generated
vendored
Normal file
95
vendor/github.com/status-im/status-go/protocol/identity/social_links.go
generated
vendored
Normal file
@@ -0,0 +1,95 @@
|
||||
package identity
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
|
||||
"github.com/status-im/status-go/protocol/protobuf"
|
||||
)
|
||||
|
||||
// static links which need to be decorated by the UI clients
// (the double-underscore prefix presumably distinguishes these well-known
// platform IDs from user-defined link text — confirm against UI usage).
const (
	TwitterID = "__twitter"
	PersonalSiteID = "__personal_site"
	GithubID = "__github"
	YoutubeID = "__youtube"
	DiscordID = "__discord"
	TelegramID = "__telegram"
)
|
||||
|
||||
// SocialLink is a single profile link: its display text and target URL.
type SocialLink struct {
	Text string `json:"text"`
	URL string `json:"url"`
}

// SocialLinks is an ordered collection of a user's profile links.
type SocialLinks []*SocialLink

// SocialLinksInfo pairs the links with a flag marking that they were removed.
type SocialLinksInfo struct {
	Links []*SocialLink `json:"links"`
	Removed bool `json:"removed"`
}
|
||||
|
||||
func NewSocialLinks(links []*protobuf.SocialLink) SocialLinks {
|
||||
res := SocialLinks{}
|
||||
for _, link := range links {
|
||||
res = append(res, &SocialLink{Text: link.Text, URL: link.Url})
|
||||
}
|
||||
return res
|
||||
}
|
||||
|
||||
// ToProtobuf converts the link to its protobuf wire representation.
func (s *SocialLink) ToProtobuf() *protobuf.SocialLink {
	return &protobuf.SocialLink{
		Text: s.Text,
		Url: s.URL,
	}
}

// Equal reports whether both the text and the URL match.
func (s *SocialLink) Equal(link *SocialLink) bool {
	return s.Text == link.Text && s.URL == link.URL
}
|
||||
|
||||
func (s *SocialLinks) ToProtobuf() []*protobuf.SocialLink {
|
||||
res := []*protobuf.SocialLink{}
|
||||
for _, link := range *s {
|
||||
res = append(res, link.ToProtobuf())
|
||||
}
|
||||
return res
|
||||
}
|
||||
|
||||
func (s *SocialLinks) ToSyncProtobuf(clock uint64) *protobuf.SyncSocialLinks {
|
||||
res := &protobuf.SyncSocialLinks{
|
||||
Clock: clock,
|
||||
}
|
||||
for _, link := range *s {
|
||||
res.SocialLinks = append(res.SocialLinks, link.ToProtobuf())
|
||||
}
|
||||
return res
|
||||
}
|
||||
|
||||
// Equal means the same links at the same order
|
||||
func (s *SocialLinks) Equal(links SocialLinks) bool {
|
||||
if len(*s) != len(links) {
|
||||
return false
|
||||
}
|
||||
for i := range *s {
|
||||
if !(*s)[i].Equal(links[i]) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func (s *SocialLinks) Contains(link *SocialLink) bool {
|
||||
if len(*s) == 0 {
|
||||
return false
|
||||
}
|
||||
for _, l := range *s {
|
||||
if l.Equal(link) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// Serialize returns the JSON encoding of the link collection.
func (s *SocialLinks) Serialize() ([]byte, error) {
	return json.Marshal(*s)
}
|
||||
81
vendor/github.com/status-im/status-go/protocol/identity/utils.go
generated
vendored
Normal file
81
vendor/github.com/status-im/status-go/protocol/identity/utils.go
generated
vendored
Normal file
@@ -0,0 +1,81 @@
|
||||
package identity
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"math/big"
|
||||
"testing"
|
||||
|
||||
"github.com/ethereum/go-ethereum/crypto/secp256k1"
|
||||
)
|
||||
|
||||
// ToColorID maps a pubkey (decimal or 0x-prefixed hex string) onto a colour
// index via modular reduction.
// NOTE(review): the modulus is colorPalletLength-1 (11), so the final palette
// entry is never selected — confirm whether that is intentional.
func ToColorID(pubkey string) (int64, error) {
	const colorPalletLength = 12

	value, ok := new(big.Int).SetString(pubkey, 0)
	if !ok {
		return 0, fmt.Errorf("invalid pubkey: %s", pubkey)
	}

	modulus := new(big.Int).SetInt64(colorPalletLength - 1)
	return new(big.Int).Mod(value, modulus).Int64(), nil
}
|
||||
|
||||
func ToBigBase(value *big.Int, base uint64) (res [](uint64)) {
|
||||
toBigBaseImpl(value, base, &res)
|
||||
return
|
||||
}
|
||||
|
||||
func toBigBaseImpl(value *big.Int, base uint64, res *[](uint64)) {
|
||||
bigBase := new(big.Int).SetUint64(base)
|
||||
quotient := new(big.Int).Div(value, bigBase)
|
||||
if quotient.Cmp(new(big.Int).SetUint64(0)) != 0 {
|
||||
toBigBaseImpl(quotient, base, res)
|
||||
}
|
||||
|
||||
*res = append(*res, new(big.Int).Mod(value, bigBase).Uint64())
|
||||
}
|
||||
|
||||
// compressedPubKey = |1.5 bytes chars cutoff|20 bytes emoji hash|10 bytes color hash|1.5 bytes chars cutoff|
// Slices splits a 33-byte compressed public key into the four bit-aligned
// regions described above; neighbouring regions overlap by half a byte, which
// the mask-and-shift below trims off.
func Slices(compressedPubkey []byte) (res [4][]byte, err error) {
	if len(compressedPubkey) != 33 {
		return res, errors.New("incorrect compressed pubkey")
	}

	// extract masks the byte range [low, high) with the given hex mask and
	// shifts right to drop the overlapping half byte.
	extract := func(low, high int, mask string, shift uint) []byte {
		region := new(big.Int).SetBytes(compressedPubkey[low:high])
		maskValue, _ := new(big.Int).SetString(mask, 0)
		return new(big.Int).Rsh(new(big.Int).And(region, maskValue), shift).Bytes()
	}

	res[0] = extract(0, 2, "0xFFF0", 4)
	res[1] = extract(1, 22, "0x0FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF0", 4)
	res[2] = extract(21, 32, "0x0FFFFFFFFFFFFFFFFFFFF0", 4)
	res[3] = extract(31, 33, "0x0FFF", 0)

	return res, nil
}
|
||||
|
||||
// ToCompressedKey parses pubkey (a decimal or 0x-prefixed hex string encoding
// a curve point) and returns the compressed secp256k1 form of that point.
func ToCompressedKey(pubkey string) ([]byte, error) {
	pubkeyValue, ok := new(big.Int).SetString(pubkey, 0)
	if !ok {
		return nil, fmt.Errorf("invalid pubkey: %s", pubkey)
	}

	// Unmarshal rejects malformed encodings; IsOnCurve additionally rejects
	// well-formed coordinates that do not lie on secp256k1.
	x, y := secp256k1.S256().Unmarshal(pubkeyValue.Bytes())
	if x == nil || !secp256k1.S256().IsOnCurve(x, y) {
		return nil, fmt.Errorf("invalid pubkey: %s", pubkey)
	}

	return secp256k1.CompressPubkey(x, y), nil
}
|
||||
|
||||
// ToBigInt parses str (base inferred from its prefix, e.g. 0x for hex) into a
// big.Int, flagging a test error on failure. Intended for use in tests.
func ToBigInt(t *testing.T, str string) *big.Int {
	value, ok := new(big.Int).SetString(str, 0)
	if !ok {
		t.Errorf("invalid conversion to int from %s", str)
	}
	return value
}
|
||||
109
vendor/github.com/status-im/status-go/protocol/identity_images.go
generated
vendored
Normal file
109
vendor/github.com/status-im/status-go/protocol/identity_images.go
generated
vendored
Normal file
@@ -0,0 +1,109 @@
|
||||
package protocol
|
||||
|
||||
import (
|
||||
"crypto/ecdsa"
|
||||
crand "crypto/rand"
|
||||
"errors"
|
||||
|
||||
"github.com/status-im/status-go/protocol/common"
|
||||
"github.com/status-im/status-go/protocol/protobuf"
|
||||
)
|
||||
|
||||
// ErrCipherMessageAutentificationFailed matches the error string produced by
// Go's crypto/cipher on GCM authentication failure; it is compared by string
// to detect "wrong key" during decryption.
// NOTE(review): this is a string, not an error value, and the name carries a
// typo ("Autentification") — both kept for backwards compatibility.
var ErrCipherMessageAutentificationFailed = "cipher: message authentication failed"

// EncryptIdentityImagesWithContactPubKeys encrypts each identity image's
// payload in place with a fresh random 32-byte AES key, then wraps that key
// once per added contact using an ECDH shared secret, appending the wrapped
// keys to ii.EncryptionKeys.
// The named return err is deliberately captured by the Range closure below:
// the closure assigns it and returns false to abort iteration, and the check
// after Range surfaces it to the caller.
func EncryptIdentityImagesWithContactPubKeys(iis map[string]*protobuf.IdentityImage, m *Messenger) (err error) {
	// Make AES key
	AESKey := make([]byte, 32)
	_, err = crand.Read(AESKey)
	if err != nil {
		return err
	}

	for _, ii := range iis {
		// Encrypt image payload with the AES key
		var encryptedPayload []byte
		encryptedPayload, err = common.Encrypt(ii.Payload, AESKey, crand.Reader)
		if err != nil {
			return err
		}

		// Overwrite the unencrypted payload with the newly encrypted payload
		ii.Payload = encryptedPayload
		ii.Encrypted = true
		m.allContacts.Range(func(contactID string, contact *Contact) (shouldContinue bool) {
			// Only contacts the user explicitly added receive a wrapped key.
			if !contact.added() {
				return true
			}
			var pubK *ecdsa.PublicKey
			var sharedKey []byte
			var eAESKey []byte

			pubK, err = contact.PublicKey()
			if err != nil {
				return false
			}
			// Generate a Diffie-Helman (DH) between the sender private key and the recipient's public key
			sharedKey, err = common.MakeECDHSharedKey(m.identity, pubK)
			if err != nil {
				return false
			}

			// Encrypt the main AES key with AES encryption using the DH key
			eAESKey, err = common.Encrypt(AESKey, sharedKey, crand.Reader)
			if err != nil {
				return false
			}

			// Append the the encrypted main AES key to the IdentityImage's EncryptionKeys slice.
			ii.EncryptionKeys = append(ii.EncryptionKeys, eAESKey)
			return true
		})
		if err != nil {
			return err
		}
	}

	return nil
}
|
||||
|
||||
// DecryptIdentityImagesWithIdentityPrivateKey attempts to decrypt each image
// by trying every wrapped key in ii.EncryptionKeys: a key that fails GCM
// authentication (wrapped for a different recipient) is skipped, while any
// other error aborts. On the first key that works the payload is replaced
// with its plaintext and the labelled continue advances to the next image.
// Images for which no key authenticates are left untouched, without error.
func DecryptIdentityImagesWithIdentityPrivateKey(iis map[string]*protobuf.IdentityImage, recipientIdentity *ecdsa.PrivateKey, senderPubKey *ecdsa.PublicKey) error {
image:
	for _, ii := range iis {
		for _, empk := range ii.EncryptionKeys {
			// Generate a Diffie-Helman (DH) between the recipient's private key and the sender's public key
			sharedKey, err := common.MakeECDHSharedKey(recipientIdentity, senderPubKey)
			if err != nil {
				return err
			}

			// Decrypt the main encryption AES key with AES encryption using the DH key
			dAESKey, err := common.Decrypt(empk, sharedKey)
			if err != nil {
				// Authentication failure just means this wrapped key was not
				// meant for us — try the next one.
				if err.Error() == ErrCipherMessageAutentificationFailed {
					continue
				}
				return err
			}
			if dAESKey == nil {
				return errors.New("decrypting the payload encryption key resulted in no error and a nil key")
			}

			// Decrypt the payload with the newly decrypted main encryption AES key
			payload, err := common.Decrypt(ii.Payload, dAESKey)
			if err != nil {
				return err
			}
			if payload == nil {
				// TODO should this be a logger warn? A payload could theoretically be validly empty
				return errors.New("decrypting the payload resulted in no error and a nil payload")
			}

			// Overwrite the payload with the decrypted data
			ii.Payload = payload
			ii.Encrypted = false
			continue image
		}
	}

	return nil
}
|
||||
80
vendor/github.com/status-im/status-go/protocol/linkpreview_unfurler.go
generated
vendored
Normal file
80
vendor/github.com/status-im/status-go/protocol/linkpreview_unfurler.go
generated
vendored
Normal file
@@ -0,0 +1,80 @@
|
||||
package protocol
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
neturl "net/url"
|
||||
"time"
|
||||
|
||||
"go.uber.org/zap"
|
||||
|
||||
"github.com/status-im/status-go/protocol/common"
|
||||
)
|
||||
|
||||
const (
	// DefaultRequestTimeout bounds every unfurl HTTP request (15 seconds).
	DefaultRequestTimeout = 15000 * time.Millisecond

	headerAcceptJSON = "application/json; charset=utf-8"
	headerAcceptText = "text/html; charset=utf-8"

	// Without a particular user agent, many providers treat status-go as a
	// gluttony bot, and either respond more frequently with a 429 (Too Many
	// Requests), or simply refuse to return valid data. Note that using a known
	// browser UA doesn't work well with some providers, such as Spotify,
	// apparently they still flag status-go as a bad actor.
	headerUserAgent = "status-go/v0.151.15"

	// Currently set to English, but we could make this setting dynamic according
	// to the user's language of choice.
	headerAcceptLanguage = "en-US,en;q=0.5"
)

// Headers maps HTTP request header names to values.
type Headers map[string]string

// Unfurler produces a link preview for a single, already-parsed URL.
type Unfurler interface {
	Unfurl() (*common.LinkPreview, error)
}

// newDefaultLinkPreview returns a preview seeded with just the URL and its
// hostname; concrete unfurlers fill in the rest.
func newDefaultLinkPreview(url *neturl.URL) *common.LinkPreview {
	return &common.LinkPreview{
		URL: url.String(),
		Hostname: url.Hostname(),
	}
}
|
||||
|
||||
// fetchBody performs a GET against url with the given headers and returns the
// response body. The request is bounded by DefaultRequestTimeout, responses
// with status >= 400 are rejected, and body-close failures are only logged.
func fetchBody(logger *zap.Logger, httpClient *http.Client, url string, headers Headers) ([]byte, error) {
	ctx, cancel := context.WithTimeout(context.Background(), DefaultRequestTimeout)
	defer cancel()

	req, err := http.NewRequestWithContext(ctx, "GET", url, nil)
	if err != nil {
		return nil, fmt.Errorf("failed to perform HTTP request: %w", err)
	}
	for name, value := range headers {
		req.Header.Set(name, value)
	}

	res, err := httpClient.Do(req)
	if err != nil {
		return nil, err
	}
	defer func() {
		if closeErr := res.Body.Close(); closeErr != nil {
			logger.Error("failed to close response body", zap.Error(closeErr))
		}
	}()

	if res.StatusCode >= http.StatusBadRequest {
		return nil, fmt.Errorf("http request failed, statusCode='%d'", res.StatusCode)
	}

	body, err := ioutil.ReadAll(res.Body)
	if err != nil {
		return nil, fmt.Errorf("failed to read body bytes: %w", err)
	}
	return body, nil
}
|
||||
115
vendor/github.com/status-im/status-go/protocol/linkpreview_unfurler_image.go
generated
vendored
Normal file
115
vendor/github.com/status-im/status-go/protocol/linkpreview_unfurler_image.go
generated
vendored
Normal file
@@ -0,0 +1,115 @@
|
||||
package protocol
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"errors"
|
||||
"fmt"
|
||||
"net/http"
|
||||
neturl "net/url"
|
||||
"path"
|
||||
"regexp"
|
||||
|
||||
"go.uber.org/zap"
|
||||
|
||||
"github.com/status-im/status-go/images"
|
||||
"github.com/status-im/status-go/protocol/common"
|
||||
"github.com/status-im/status-go/protocol/protobuf"
|
||||
)
|
||||
|
||||
const (
	// maxImageSize caps the (possibly compressed) thumbnail at 350 KiB.
	maxImageSize = 1024 * 350
)

// imageURLRegexp matches URL paths ending in a supported image extension.
// NOTE(review): the pattern lacks a literal dot before the extension group,
// so paths like "/foopng" also match — confirm whether `\.` was intended.
var imageURLRegexp = regexp.MustCompile(`(?i)^.+(png|jpg|jpeg|webp)$`)

// ImageUnfurler builds previews for URLs that point directly at image files.
type ImageUnfurler struct {
	url *neturl.URL
	logger *zap.Logger
	httpClient *http.Client
}
|
||||
|
||||
// NewImageUnfurler returns an unfurler for the given image URL using the
// provided logger and HTTP client.
func NewImageUnfurler(URL *neturl.URL, logger *zap.Logger, httpClient *http.Client) *ImageUnfurler {
	u := new(ImageUnfurler)
	u.url = URL
	u.logger = logger
	u.httpClient = httpClient
	return u
}
|
||||
|
||||
func compressImage(imgBytes []byte) ([]byte, error) {
|
||||
smallest := imgBytes
|
||||
|
||||
img, err := images.DecodeImageData(imgBytes, bytes.NewReader(imgBytes))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
compressed := bytes.NewBuffer([]byte{})
|
||||
err = images.CompressToFileLimits(compressed, img, images.DefaultBounds)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if len(compressed.Bytes()) < len(smallest) {
|
||||
smallest = compressed.Bytes()
|
||||
}
|
||||
|
||||
if len(smallest) > maxImageSize {
|
||||
return nil, errors.New("image too large")
|
||||
}
|
||||
|
||||
return smallest, nil
|
||||
}
|
||||
|
||||
// IsSupportedImageURL detects whether a URL ends with one of the
// supported image extensions. It provides a quick way to identify whether URLs
// should be unfurled as images without needing to retrieve the full response
// body first.
// Only the path component is inspected; query strings are ignored.
func IsSupportedImageURL(url *neturl.URL) bool {
	return imageURLRegexp.MatchString(url.Path)
}
|
||||
|
||||
// isSupportedImage returns true when payload is one of the supported image
// types. In the future, we should differentiate between animated and
// non-animated WebP because, currently, only static WebP can be processed by
// functions in the status-go/images package.
func isSupportedImage(payload []byte) bool {
	return images.IsJpeg(payload) || images.IsPng(payload) || images.IsWebp(payload)
}

// Unfurl downloads the image at u.url, verifies it is a supported type by
// sniffing the payload (not the URL extension), compresses it to the
// thumbnail size budget, and returns a preview whose thumbnail carries the
// image as a data URI. On error the partially-filled preview is returned
// alongside the error.
func (u *ImageUnfurler) Unfurl() (*common.LinkPreview, error) {
	preview := newDefaultLinkPreview(u.url)
	preview.Type = protobuf.UnfurledLink_IMAGE

	headers := map[string]string{"user-agent": headerUserAgent}
	imgBytes, err := fetchBody(u.logger, u.httpClient, u.url.String(), headers)
	if err != nil {
		return preview, err
	}

	if !isSupportedImage(imgBytes) {
		return preview, fmt.Errorf("unsupported image type url='%s'", u.url.String())
	}

	compressedBytes, err := compressImage(imgBytes)
	if err != nil {
		return preview, fmt.Errorf("failed to compress image url='%s': %w", u.url.String(), err)
	}

	width, height, err := images.GetImageDimensions(compressedBytes)
	if err != nil {
		return preview, fmt.Errorf("could not get image dimensions url='%s': %w", u.url.String(), err)
	}

	dataURI, err := images.GetPayloadDataURI(compressedBytes)
	if err != nil {
		return preview, fmt.Errorf("could not build data URI url='%s': %w", u.url.String(), err)
	}

	// The file name stands in for a title; a raw image has no metadata to use.
	preview.Title = path.Base(u.url.Path)
	preview.Thumbnail.Width = width
	preview.Thumbnail.Height = height
	preview.Thumbnail.DataURI = dataURI

	return preview, nil
}
|
||||
93
vendor/github.com/status-im/status-go/protocol/linkpreview_unfurler_oembed.go
generated
vendored
Normal file
93
vendor/github.com/status-im/status-go/protocol/linkpreview_unfurler_oembed.go
generated
vendored
Normal file
@@ -0,0 +1,93 @@
|
||||
package protocol
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"net/http"
|
||||
neturl "net/url"
|
||||
|
||||
"go.uber.org/zap"
|
||||
|
||||
"github.com/status-im/status-go/protocol/common"
|
||||
"github.com/status-im/status-go/protocol/protobuf"
|
||||
)
|
||||
|
||||
// OEmbedUnfurler builds link previews by querying a provider's oEmbed
// endpoint (see https://oembed.com).
type OEmbedUnfurler struct {
	logger *zap.Logger
	httpClient *http.Client
	// oembedEndpoint describes where the consumer may request representations for
	// the supported URL scheme. For example, for YouTube, it is
	// https://www.youtube.com/oembed.
	oembedEndpoint string
	// url is the actual URL to be unfurled.
	url *neturl.URL
}

// NewOEmbedUnfurler returns an unfurler that resolves url through the given
// provider oEmbed endpoint.
func NewOEmbedUnfurler(oembedEndpoint string,
	url *neturl.URL,
	logger *zap.Logger,
	httpClient *http.Client) *OEmbedUnfurler {
	return &OEmbedUnfurler{
		oembedEndpoint: oembedEndpoint,
		url: url,
		logger: logger,
		httpClient: httpClient,
	}
}

// OEmbedResponse is the subset of the provider's oEmbed JSON document that
// this unfurler consumes.
type OEmbedResponse struct {
	Title string `json:"title"`
	ThumbnailURL string `json:"thumbnail_url"`
}
|
||||
|
||||
// newOEmbedURL builds the provider request URL: the configured oEmbed
// endpoint with the target url and a mandatory JSON format in the query.
func (u *OEmbedUnfurler) newOEmbedURL() (*neturl.URL, error) {
	endpoint, err := neturl.Parse(u.oembedEndpoint)
	if err != nil {
		return nil, err
	}

	// When format is specified, the provider MUST return data in the requested
	// format, else return an error.
	query := neturl.Values{}
	query.Set("url", u.url.String())
	query.Set("format", "json")
	endpoint.RawQuery = query.Encode()

	return endpoint, nil
}
|
||||
|
||||
func (u OEmbedUnfurler) Unfurl() (*common.LinkPreview, error) {
|
||||
preview := newDefaultLinkPreview(u.url)
|
||||
preview.Type = protobuf.UnfurledLink_LINK
|
||||
|
||||
oembedURL, err := u.newOEmbedURL()
|
||||
if err != nil {
|
||||
return preview, err
|
||||
}
|
||||
|
||||
headers := map[string]string{
|
||||
"accept": headerAcceptJSON,
|
||||
"accept-language": headerAcceptLanguage,
|
||||
"user-agent": headerUserAgent,
|
||||
}
|
||||
oembedBytes, err := fetchBody(u.logger, u.httpClient, oembedURL.String(), headers)
|
||||
if err != nil {
|
||||
return preview, err
|
||||
}
|
||||
|
||||
var oembedResponse OEmbedResponse
|
||||
if err != nil {
|
||||
return preview, err
|
||||
}
|
||||
err = json.Unmarshal(oembedBytes, &oembedResponse)
|
||||
if err != nil {
|
||||
return preview, err
|
||||
}
|
||||
|
||||
if oembedResponse.Title == "" {
|
||||
return preview, fmt.Errorf("missing required title in oEmbed response")
|
||||
}
|
||||
|
||||
preview.Title = oembedResponse.Title
|
||||
return preview, nil
|
||||
}
|
||||
106
vendor/github.com/status-im/status-go/protocol/linkpreview_unfurler_opengraph.go
generated
vendored
Normal file
106
vendor/github.com/status-im/status-go/protocol/linkpreview_unfurler_opengraph.go
generated
vendored
Normal file
@@ -0,0 +1,106 @@
|
||||
package protocol
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
neturl "net/url"
|
||||
|
||||
"github.com/keighl/metabolize"
|
||||
"go.uber.org/zap"
|
||||
|
||||
"github.com/status-im/status-go/images"
|
||||
"github.com/status-im/status-go/protocol/common"
|
||||
"github.com/status-im/status-go/protocol/protobuf"
|
||||
)
|
||||
|
||||
// OpenGraphMetadata maps the OpenGraph <meta> properties extracted from a
// fetched HTML document (via the `meta` struct tags).
type OpenGraphMetadata struct {
	Title        string `json:"title" meta:"og:title"`
	Description  string `json:"description" meta:"og:description"`
	ThumbnailURL string `json:"thumbnailUrl" meta:"og:image"`
}
|
||||
|
||||
// OpenGraphUnfurler should be preferred over OEmbedUnfurler because oEmbed
// gives back a JSON response with a "html" field that's supposed to be embedded
// in an iframe (hardly useful for existing Status' clients).
type OpenGraphUnfurler struct {
	// url is the actual URL to be unfurled.
	url        *neturl.URL
	logger     *zap.Logger
	httpClient *http.Client
}
|
||||
|
||||
func NewOpenGraphUnfurler(URL *neturl.URL, logger *zap.Logger, httpClient *http.Client) *OpenGraphUnfurler {
|
||||
return &OpenGraphUnfurler{
|
||||
url: URL,
|
||||
logger: logger,
|
||||
httpClient: httpClient,
|
||||
}
|
||||
}
|
||||
|
||||
func (u *OpenGraphUnfurler) Unfurl() (*common.LinkPreview, error) {
|
||||
preview := newDefaultLinkPreview(u.url)
|
||||
preview.Type = protobuf.UnfurledLink_LINK
|
||||
|
||||
headers := map[string]string{
|
||||
"accept": headerAcceptText,
|
||||
"accept-language": headerAcceptLanguage,
|
||||
"user-agent": headerUserAgent,
|
||||
}
|
||||
bodyBytes, err := fetchBody(u.logger, u.httpClient, u.url.String(), headers)
|
||||
if err != nil {
|
||||
return preview, err
|
||||
}
|
||||
|
||||
var ogMetadata OpenGraphMetadata
|
||||
err = metabolize.Metabolize(ioutil.NopCloser(bytes.NewBuffer(bodyBytes)), &ogMetadata)
|
||||
if err != nil {
|
||||
return preview, fmt.Errorf("failed to parse OpenGraph data")
|
||||
}
|
||||
|
||||
// There are URLs like https://wikipedia.org/ that don't have an OpenGraph
|
||||
// title tag, but article pages do. In the future, we can fallback to the
|
||||
// website's title by using the <title> tag.
|
||||
if ogMetadata.Title == "" {
|
||||
return preview, fmt.Errorf("missing required title in OpenGraph response")
|
||||
}
|
||||
|
||||
if ogMetadata.ThumbnailURL != "" {
|
||||
t, err := fetchThumbnail(u.logger, u.httpClient, ogMetadata.ThumbnailURL)
|
||||
if err != nil {
|
||||
// Given we want to fetch thumbnails on a best-effort basis, if an error
|
||||
// happens we simply log it.
|
||||
u.logger.Info("failed to fetch thumbnail", zap.String("url", u.url.String()), zap.Error(err))
|
||||
} else {
|
||||
preview.Thumbnail = t
|
||||
}
|
||||
}
|
||||
|
||||
preview.Title = ogMetadata.Title
|
||||
preview.Description = ogMetadata.Description
|
||||
return preview, nil
|
||||
}
|
||||
|
||||
func fetchThumbnail(logger *zap.Logger, httpClient *http.Client, url string) (common.LinkPreviewThumbnail, error) {
|
||||
var thumbnail common.LinkPreviewThumbnail
|
||||
|
||||
imgBytes, err := fetchBody(logger, httpClient, url, nil)
|
||||
if err != nil {
|
||||
return thumbnail, fmt.Errorf("could not fetch thumbnail url='%s': %w", url, err)
|
||||
}
|
||||
|
||||
width, height, err := images.GetImageDimensions(imgBytes)
|
||||
if err != nil {
|
||||
return thumbnail, fmt.Errorf("could not get image dimensions url='%s': %w", url, err)
|
||||
}
|
||||
thumbnail.Width = width
|
||||
thumbnail.Height = height
|
||||
|
||||
dataURI, err := images.GetPayloadDataURI(imgBytes)
|
||||
if err != nil {
|
||||
return thumbnail, fmt.Errorf("could not build data URI url='%s': %w", url, err)
|
||||
}
|
||||
thumbnail.DataURI = dataURI
|
||||
|
||||
return thumbnail, nil
|
||||
}
|
||||
172
vendor/github.com/status-im/status-go/protocol/linkpreview_unfurler_status.go
generated
vendored
Normal file
172
vendor/github.com/status-im/status-go/protocol/linkpreview_unfurler_status.go
generated
vendored
Normal file
@@ -0,0 +1,172 @@
|
||||
package protocol
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"go.uber.org/zap"
|
||||
|
||||
"github.com/status-im/status-go/api/multiformat"
|
||||
"github.com/status-im/status-go/images"
|
||||
"github.com/status-im/status-go/protocol/common"
|
||||
"github.com/status-im/status-go/protocol/common/shard"
|
||||
"github.com/status-im/status-go/protocol/communities"
|
||||
)
|
||||
|
||||
// StatusUnfurler builds link previews for Status-internal shared URLs
// (contacts, communities, and community channels), resolving data through the
// Messenger.
type StatusUnfurler struct {
	m      *Messenger
	logger *zap.Logger
	// url is the shared URL to be unfurled.
	url string
}
|
||||
|
||||
func NewStatusUnfurler(URL string, messenger *Messenger, logger *zap.Logger) *StatusUnfurler {
|
||||
return &StatusUnfurler{
|
||||
m: messenger,
|
||||
logger: logger.With(zap.String("url", URL)),
|
||||
url: URL,
|
||||
}
|
||||
}
|
||||
|
||||
func updateThumbnail(image *images.IdentityImage, thumbnail *common.LinkPreviewThumbnail) error {
|
||||
if image.IsEmpty() {
|
||||
return nil
|
||||
}
|
||||
|
||||
width, height, err := images.GetImageDimensions(image.Payload)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to get image dimensions: %w", err)
|
||||
}
|
||||
|
||||
dataURI, err := image.GetDataURI()
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to get data uri: %w", err)
|
||||
}
|
||||
|
||||
thumbnail.Width = width
|
||||
thumbnail.Height = height
|
||||
thumbnail.DataURI = dataURI
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (u *StatusUnfurler) buildContactData(publicKey string) (*common.StatusContactLinkPreview, error) {
|
||||
// contactID == "0x" + secp251k1 compressed public key as hex-encoded string
|
||||
contactID, err := multiformat.DeserializeCompressedKey(publicKey)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
contact := u.m.GetContactByID(contactID)
|
||||
|
||||
// If no contact found locally, fetch it from waku
|
||||
if contact == nil {
|
||||
if contact, err = u.m.FetchContact(contactID, true); err != nil {
|
||||
return nil, fmt.Errorf("failed to request contact info from mailserver for public key '%s': %w", publicKey, err)
|
||||
}
|
||||
}
|
||||
|
||||
c := &common.StatusContactLinkPreview{
|
||||
PublicKey: contactID,
|
||||
DisplayName: contact.DisplayName,
|
||||
Description: contact.Bio,
|
||||
}
|
||||
|
||||
if image, ok := contact.Images[images.SmallDimName]; ok {
|
||||
if err = updateThumbnail(&image, &c.Icon); err != nil {
|
||||
u.logger.Warn("unfurling status link: failed to set contact thumbnail", zap.Error(err))
|
||||
}
|
||||
}
|
||||
|
||||
return c, nil
|
||||
}
|
||||
|
||||
func (u *StatusUnfurler) buildCommunityData(communityID string, shard *shard.Shard) (*communities.Community, *common.StatusCommunityLinkPreview, error) {
|
||||
// This automatically checks the database
|
||||
community, err := u.m.FetchCommunity(&FetchCommunityRequest{
|
||||
CommunityKey: communityID,
|
||||
Shard: shard,
|
||||
TryDatabase: true,
|
||||
WaitForResponse: true,
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
return nil, nil, fmt.Errorf("failed to get community info for communityID '%s': %w", communityID, err)
|
||||
}
|
||||
|
||||
if community == nil {
|
||||
return community, nil, fmt.Errorf("community info fetched, but it is empty")
|
||||
}
|
||||
|
||||
statusCommunityLinkPreviews, err := community.ToStatusLinkPreview()
|
||||
if err != nil {
|
||||
return nil, nil, fmt.Errorf("failed to get status community link preview for communityID '%s': %w", communityID, err)
|
||||
}
|
||||
|
||||
return community, statusCommunityLinkPreviews, nil
|
||||
}
|
||||
|
||||
func (u *StatusUnfurler) buildChannelData(channelUUID string, communityID string, communityShard *shard.Shard) (*common.StatusCommunityChannelLinkPreview, error) {
|
||||
community, communityData, err := u.buildCommunityData(communityID, communityShard)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to build channel community data: %w", err)
|
||||
}
|
||||
|
||||
channel, ok := community.Chats()[channelUUID]
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("channel with channelID '%s' not found in community '%s'", channelUUID, communityID)
|
||||
}
|
||||
|
||||
return &common.StatusCommunityChannelLinkPreview{
|
||||
ChannelUUID: channelUUID,
|
||||
Emoji: channel.Identity.Emoji,
|
||||
DisplayName: channel.Identity.DisplayName,
|
||||
Description: channel.Identity.Description,
|
||||
Color: channel.Identity.Color,
|
||||
Community: communityData,
|
||||
}, nil
|
||||
}
|
||||
|
||||
// Unfurl parses u.url as a Status shared URL and builds the matching preview:
// a contact, a community channel, or a community (checked in that order).
// Returns an error if the URL cannot be parsed or matches none of the three.
func (u *StatusUnfurler) Unfurl() (*common.StatusLinkPreview, error) {
	preview := new(common.StatusLinkPreview)
	preview.URL = u.url

	resp, err := ParseSharedURL(u.url)
	if err != nil {
		return nil, fmt.Errorf("failed to parse shared url: %w", err)
	}

	// If a URL has been successfully parsed,
	// any further errors should not be returned, only logged.
	// NOTE(review): the branches below still return build errors to the
	// caller, which contradicts the sentence above — confirm which behavior
	// is intended.

	if resp.Contact != nil {
		preview.Contact, err = u.buildContactData(resp.Contact.PublicKey)
		if err != nil {
			return nil, fmt.Errorf("error when building contact data: %w", err)
		}
		return preview, nil
	}

	// NOTE: Currently channel data comes together with community data,
	// both `Community` and `Channel` fields will be present.
	// So we check for Channel first, then Community.

	if resp.Channel != nil {
		// A channel link without its community is malformed.
		if resp.Community == nil {
			return preview, fmt.Errorf("channel community can't be empty")
		}
		preview.Channel, err = u.buildChannelData(resp.Channel.ChannelUUID, resp.Community.CommunityID, resp.Shard)
		if err != nil {
			return nil, fmt.Errorf("error when building channel data: %w", err)
		}
		return preview, nil
	}

	if resp.Community != nil {
		// The community itself is discarded; only the preview is kept.
		_, preview.Community, err = u.buildCommunityData(resp.Community.CommunityID, resp.Shard)
		if err != nil {
			return nil, fmt.Errorf("error when building community data: %w", err)
		}
		return preview, nil
	}

	return nil, fmt.Errorf("shared url does not contain contact, community or channel data")
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user