Versions in this module

v1
    v1.0.0 (Jul 5, 2019)

Changes in this version

+ const AddressLength
+ const BMTHash
+ const ChunkProcessors
+ const CurrentDbSchema
+ const DbSchemaHalloween
+ const DbSchemaNone
+ const DbSchemaPurity
+ const DefaultHash
+ const ErrDataOverflow
+ const ErrInit
+ const ErrInvalidSignature
+ const ErrInvalidValue
+ const ErrNotFound
+ const ErrNotSynced
+ const ErrNothingToReturn
+ const ErrUnauthorized
+ const MaxPO
+ const SHA3Hash

+ var ErrChunkInvalid = errors.New("invalid chunk")
+ var ErrChunkNotFound = errors.New("chunk not found")
+ var ErrDBClosed = errors.New("LDBStore closed")
+ var ZeroAddr = Address(common.Hash{}.Bytes())

+ func BytesToU64(data []byte) uint64
+ func NewChunk(addr Address, data []byte) *chunk
+ func NewHasherStore(store ChunkStore, hashFunc SwarmHasher, toEncrypt bool) *hasherStore
+ func Proximity(one, other []byte) (ret int)
+ func U64ToBytes(val uint64) []byte

+ type Address []byte
    + func PyramidAppend(ctx context.Context, addr Address, reader io.Reader, putter Putter, ...) (Address, func(context.Context) error, error)
    + func PyramidSplit(ctx context.Context, reader io.Reader, putter Putter, getter Getter) (Address, func(context.Context) error, error)
    + func TreeSplit(ctx context.Context, data io.Reader, size int64, putter Putter) (k Address, wait func(context.Context) error, err error)
    + func (a *Address) UnmarshalJSON(value []byte) error
    + func (a Address) Hex() string
    + func (a Address) Log() string
    + func (a Address) MarshalJSON() (out []byte, err error)
    + func (a Address) String() string
+ type AddressCollection []Address
    + func NewAddressCollection(l int) AddressCollection
    + func (c AddressCollection) Len() int
    + func (c AddressCollection) Less(i, j int) bool
    + func (c AddressCollection) Swap(i, j int)
+ type Chunk interface
    + Address func() Address
    + Data func() []byte
    + func GenerateRandomChunk(dataSize int64) Chunk
    + func GenerateRandomChunks(dataSize int64, count int) (chunks []Chunk)
+ type ChunkData []byte
    + func (c ChunkData) Size() uint64
+ type ChunkStore interface
    + Close func()
    + Get func(rctx context.Context, ref Address) (ch Chunk, err error)
    + Has func(rctx context.Context, ref Address) bool
    + Put func(ctx context.Context, ch Chunk) (err error)
+ type ChunkValidator interface
    + Validate func(chunk Chunk) bool
+ type ChunkerParams struct
+ type ContentAddressValidator struct
    + Hasher SwarmHasher
    + func NewContentAddressValidator(hasher SwarmHasher) *ContentAddressValidator
    + func (v *ContentAddressValidator) Validate(chunk Chunk) bool
+ type FakeChunkStore struct
    + func (f *FakeChunkStore) Close()
    + func (f *FakeChunkStore) Get(_ context.Context, ref Address) (Chunk, error)
    + func (f *FakeChunkStore) Has(_ context.Context, ref Address) bool
    + func (f *FakeChunkStore) Put(_ context.Context, ch Chunk) error
+ type FileStore struct
    + func NewFileStore(store ChunkStore, params *FileStoreParams) *FileStore
    + func NewLocalFileStore(datadir string, basekey []byte) (*FileStore, error)
    + func (f *FileStore) GetAllReferences(ctx context.Context, data io.Reader, toEncrypt bool) (addrs AddressCollection, err error)
    + func (f *FileStore) HashSize() int
    + func (f *FileStore) Retrieve(ctx context.Context, addr Address) (reader *LazyChunkReader, isEncrypted bool)
    + func (f *FileStore) Store(ctx context.Context, data io.Reader, size int64, toEncrypt bool) (addr Address, wait func(context.Context) error, err error)
+ type FileStoreParams struct
    + Hash string
    + func NewFileStoreParams() *FileStoreParams
+ type Getter interface
    + Get func(context.Context, Reference) (ChunkData, error)
+ type HashWithLength struct
    + func (h *HashWithLength) ResetWithLength(length []byte)
+ type JoinerParams struct
+ type LDBDatabase struct
    + func NewLDBDatabase(file string) (*LDBDatabase, error)
    + func (db *LDBDatabase) Close()
    + func (db *LDBDatabase) Delete(key []byte) error
    + func (db *LDBDatabase) Get(key []byte) ([]byte, error)
    + func (db *LDBDatabase) NewIterator() iterator.Iterator
    + func (db *LDBDatabase) Put(key []byte, value []byte) error
    + func (db *LDBDatabase) Write(batch *leveldb.Batch) error
+ type LDBStore struct
    + func NewLDBStore(params *LDBStoreParams) (s *LDBStore, err error)
    + func NewMockDbStore(params *LDBStoreParams, mockStore *mock.NodeStore) (s *LDBStore, err error)
    + func (s *LDBStore) BinIndex(po uint8) uint64
    + func (s *LDBStore) CleanGCIndex() error
    + func (s *LDBStore) Cleanup(f func(*chunk) bool)
    + func (s *LDBStore) Close()
    + func (s *LDBStore) Delete(addr Address) error
    + func (s *LDBStore) Export(out io.Writer) (int64, error)
    + func (s *LDBStore) Get(_ context.Context, addr Address) (chunk Chunk, err error)
    + func (s *LDBStore) GetSchema() (string, error)
    + func (s *LDBStore) Has(_ context.Context, addr Address) bool
    + func (s *LDBStore) Import(in io.Reader) (int64, error)
    + func (s *LDBStore) MarkAccessed(addr Address)
    + func (s *LDBStore) Put(ctx context.Context, chunk Chunk) error
    + func (s *LDBStore) PutSchema(schema string) error
    + func (s *LDBStore) SyncIterator(since uint64, until uint64, po uint8, f func(Address, uint64) bool) error
+ type LDBStoreParams struct
    + Path string
    + Po func(Address) uint8
    + func NewLDBStoreParams(storeparams *StoreParams, path string) *LDBStoreParams
+ type LazyChunkReader struct
    + func TreeJoin(ctx context.Context, addr Address, getter Getter, depth int) *LazyChunkReader
    + func (r *LazyChunkReader) Context() context.Context
    + func (r *LazyChunkReader) Read(b []byte) (read int, err error)
    + func (r *LazyChunkReader) ReadAt(b []byte, off int64) (read int, err error)
    + func (r *LazyChunkReader) Seek(offset int64, whence int) (int64, error)
    + func (r *LazyChunkReader) Size(ctx context.Context, quitC chan bool) (n int64, err error)
+ type LazySectionReader interface
    + Context func() context.Context
    + Size func(context.Context, chan bool) (int64, error)
+ type LazyTestSectionReader struct
    + func (r *LazyTestSectionReader) Context() context.Context
    + func (r *LazyTestSectionReader) Size(context.Context, chan bool) (int64, error)
+ type LocalStore struct
    + DbStore *LDBStore
    + Validators []ChunkValidator
    + func NewLocalStore(params *LocalStoreParams, mockStore *mock.NodeStore) (*LocalStore, error)
    + func NewTestLocalStoreForAddr(params *LocalStoreParams) (*LocalStore, error)
    + func (ls *LocalStore) BinIndex(po uint8) uint64
    + func (ls *LocalStore) Close()
    + func (ls *LocalStore) FetchFunc(ctx context.Context, addr Address) func(context.Context) error
    + func (ls *LocalStore) Get(ctx context.Context, addr Address) (chunk Chunk, err error)
    + func (ls *LocalStore) Has(ctx context.Context, addr Address) bool
    + func (ls *LocalStore) Iterator(from uint64, to uint64, po uint8, f func(Address, uint64) bool) error
    + func (ls *LocalStore) Migrate() error
    + func (ls *LocalStore) Put(ctx context.Context, chunk Chunk) error
+ type LocalStoreParams struct
    + ChunkDbPath string
    + Validators []ChunkValidator
    + func NewDefaultLocalStoreParams() *LocalStoreParams
    + func (p *LocalStoreParams) Init(path string)
+ type MemStore struct
    + func NewMemStore(params *StoreParams, _ *LDBStore) (m *MemStore)
    + func (m *MemStore) Get(_ context.Context, addr Address) (Chunk, error)
    + func (m *MemStore) Has(_ context.Context, addr Address) bool
    + func (m *MemStore) Put(_ context.Context, c Chunk) error
    + func (s *MemStore) Close()
+ type NetFetcher interface
    + Offer func(source *enode.ID)
    + Request func(hopCount uint8)
+ type NetStore struct
    + NewNetFetcherFunc NewNetFetcherFunc
    + func NewNetStore(store SyncChunkStore, nnf NewNetFetcherFunc) (*NetStore, error)
    + func (n *NetStore) BinIndex(po uint8) uint64
    + func (n *NetStore) Close()
    + func (n *NetStore) FetchFunc(ctx context.Context, ref Address) func(context.Context) error
    + func (n *NetStore) Get(rctx context.Context, ref Address) (Chunk, error)
    + func (n *NetStore) Has(ctx context.Context, ref Address) bool
    + func (n *NetStore) Iterator(from uint64, to uint64, po uint8, f func(Address, uint64) bool) error
    + func (n *NetStore) Put(ctx context.Context, ch Chunk) error
    + func (n *NetStore) RequestsCacheLen() int
+ type NewNetFetcherFunc func(ctx context.Context, addr Address, peers *sync.Map) NetFetcher
+ type Putter interface
    + Close func()
    + Put func(context.Context, ChunkData) (Reference, error)
    + RefSize func() int64
    + Wait func(context.Context) error
+ type PyramidChunker struct
    + func NewPyramidSplitter(params *PyramidSplitterParams) (pc *PyramidChunker)
    + func (pc *PyramidChunker) Append(ctx context.Context) (k Address, wait func(context.Context) error, err error)
    + func (pc *PyramidChunker) Join(addr Address, getter Getter, depth int) LazySectionReader
    + func (pc *PyramidChunker) Split(ctx context.Context) (k Address, wait func(context.Context) error, err error)
+ type PyramidSplitterParams struct
    + func NewPyramidSplitterParams(addr Address, reader io.Reader, putter Putter, getter Getter, chunkSize int64) *PyramidSplitterParams
+ type Reference []byte
+ type SplitterParams struct
+ type StoreParams struct
    + BaseKey []byte
    + CacheCapacity uint
    + DbCapacity uint64
    + Hash SwarmHasher
    + func NewDefaultStoreParams() *StoreParams
    + func NewStoreParams(ldbCap uint64, cacheCap uint, hash SwarmHasher, basekey []byte) *StoreParams
+ type SwarmHash interface
    + ResetWithLength func([]byte)
+ type SwarmHasher func() SwarmHash
    + func MakeHashFunc(hash string) SwarmHasher
+ type SyncChunkStore interface
    + BinIndex func(po uint8) uint64
    + FetchFunc func(ctx context.Context, ref Address) func(context.Context) error
    + Iterator func(from uint64, to uint64, po uint8, f func(Address, uint64) bool) error
+ type TreeChunker struct
    + func NewTreeJoiner(params *JoinerParams) *TreeChunker
    + func NewTreeSplitter(params *TreeSplitterParams) *TreeChunker
    + func (tc *TreeChunker) Join(ctx context.Context) *LazyChunkReader
    + func (tc *TreeChunker) Split(ctx context.Context) (k Address, wait func(context.Context) error, err error)
+ type TreeEntry struct
    + func NewTreeEntry(pyramid *PyramidChunker) *TreeEntry
+ type TreeSplitterParams struct
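
The hedged sketches below illustrate a few of the APIs listed above; they are not taken from the package documentation. This first one stores a small payload through FileStore and reads it back via the returned LazyChunkReader. The import path is an assumption (the symbols match the Swarm storage package historically hosted at github.com/ethereum/go-ethereum/swarm/storage; substitute this module's own path), as are the temporary data directory and the zero-filled 32-byte base key, which in a running node would be the node's overlay address. Only the signatures of NewLocalFileStore, Store and Retrieve come from the index above.

    package main

    import (
        "bytes"
        "context"
        "fmt"
        "io/ioutil"
        "log"

        "github.com/ethereum/go-ethereum/swarm/storage" // assumed import path
    )

    func main() {
        ctx := context.Background()
        payload := []byte("hello swarm")

        // Open a FileStore backed by a local chunk database. The 32-byte base key
        // is zero-filled here purely for illustration.
        fs, err := storage.NewLocalFileStore("/tmp/swarm-chunks", make([]byte, 32))
        if err != nil {
            log.Fatal(err)
        }

        // Store splits the payload into a chunk tree and returns its root address
        // together with a wait function that blocks until all chunks are stored.
        addr, wait, err := fs.Store(ctx, bytes.NewReader(payload), int64(len(payload)), false)
        if err != nil {
            log.Fatal(err)
        }
        if err := wait(ctx); err != nil {
            log.Fatal(err)
        }

        // Retrieve returns a LazyChunkReader that joins the chunk tree on demand.
        reader, _ := fs.Retrieve(ctx, addr)
        out, err := ioutil.ReadAll(reader)
        if err != nil {
            log.Fatal(err)
        }
        fmt.Printf("%s -> %s\n", addr.Hex(), out)
    }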
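
Proximity underpins the po (proximity order) arguments taken by BinIndex, Iterator and SyncIterator: it counts how many leading bits two addresses have in common. A minimal sketch of that definition, written as a godoc-style example and assuming the same import as the first sketch; the result for identical inputs is capped at MaxPO, whose concrete value is not shown here.

    func ExampleProximity() {
        // Two 32-byte addresses that agree on the first byte and then
        // diverge in the most significant bit of the second byte.
        one := make([]byte, 32)
        other := make([]byte, 32)
        other[1] = 0x80

        // The first eight bits (one full byte) match, so the proximity order is 8.
        fmt.Println(storage.Proximity(one, other))

        // Identical inputs share every bit; the result is capped at MaxPO.
        fmt.Println(storage.Proximity(one, one))
    }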
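
ContentAddressValidator ties a chunk's address to the hash of its data: MakeHashFunc turns one of the hash-name constants (DefaultHash, BMTHash, SHA3Hash) into a SwarmHasher, and Validate recomputes the hash over the chunk's data. In the sketch below (same imports as the first sketch), the expectation that GenerateRandomChunk yields a properly content-addressed chunk, so that Validate reports true, is an assumption based on how the two are commonly used together, not something stated in the index.

    func ExampleContentAddressValidator() {
        // Build a validator around the package's default hash function.
        validator := storage.NewContentAddressValidator(storage.MakeHashFunc(storage.DefaultHash))

        // A 4 KB random chunk, assumed to be content addressed.
        ch := storage.GenerateRandomChunk(4096)
        fmt.Println(validator.Validate(ch)) // expected: true

        // Pairing the same data with the zero address should fail validation.
        bad := storage.NewChunk(storage.ZeroAddr, ch.Data())
        fmt.Println(validator.Validate(bad)) // expected: false
    }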
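
LocalStore combines the in-memory MemStore with an on-disk LDBStore and runs any configured ChunkValidators on Put. The sketch below (again assuming the imports of the first sketch) is a guess at the minimal setup path through NewDefaultLocalStoreParams, Init and NewLocalStore: that Init derives the chunk database path from the given directory, and that a store with no validators accepts every chunk, are assumptions; the nil second argument means no mock node store is used.

    func ExampleLocalStore() {
        params := storage.NewDefaultLocalStoreParams()
        params.Init("/tmp/swarm-localstore") // assumed to derive ChunkDbPath from this directory
        ls, err := storage.NewLocalStore(params, nil)
        if err != nil {
            log.Fatal(err)
        }
        defer ls.Close()

        ctx := context.Background()
        ch := storage.GenerateRandomChunk(4096)

        // With no Validators configured, Put should accept the chunk.
        if err := ls.Put(ctx, ch); err != nil {
            log.Fatal(err)
        }

        // The chunk is now retrievable by its address.
        got, err := ls.Get(ctx, ch.Address())
        if err != nil {
            log.Fatal(err)
        }
        fmt.Println(ls.Has(ctx, ch.Address()), bytes.Equal(got.Data(), ch.Data()))
    }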