Documentation ¶
Overview ¶
Package gotinydb implements a simple but useful embedded database.
It supports inserting documents and retrieving them into Go pointers via the JSON package. Those documents can be indexed with Bleve.
File storage is also supported, and the whole database is encrypted.
It relies on Bleve and Badger to do the job.
Example ¶
package main import ( "fmt" "log" "os" "time" "github.com/alexandrestein/gotinydb" "github.com/blevesearch/bleve" ) type user struct { ID int Email string LastLogin time.Time } var ( testTime time.Time dbKey [32]byte ) func main() { // Open or create the database at the given path and with the given encryption key db, err := gotinydb.Open(os.TempDir()+"/package_example", dbKey) if err != nil { log.Fatal(err) } defer db.Close() // Open a collection var c *gotinydb.Collection c, err = db.Use("users") if err != nil { log.Fatal(err) } // Build the index mapping (take a look at bleve) // This is a static mapping document to index only specified fields userDocumentMapping := bleve.NewDocumentStaticMapping() // Build the field checker emailFieldMapping := bleve.NewTextFieldMapping() // Add a text filed to Email property userDocumentMapping.AddFieldMappingsAt("Email", emailFieldMapping) // In this case it indexes only the field "Email" // Save the bleve indexexes err = c.SetBleveIndex("email", userDocumentMapping) if err != nil { if err != gotinydb.ErrNameAllreadyExists { log.Fatal(err) } } // Example user record := &user{ 316, "jonas-90@tlaloc.com", testTime, } // Save it in DB if err = c.Put("id", record); err != nil { log.Fatal(err) } // Build the query query := bleve.NewQueryStringQuery(record.Email) // Add the query to the search var searchResult *gotinydb.SearchResult searchResult, err = c.Search("email", query) if err != nil { log.Fatal(err) } // Convert the reccored into a struct using JSON internally retrievedRecord := new(user) id, respErr := searchResult.Next(retrievedRecord) if respErr != nil { log.Fatal(respErr) } // Display the result fmt.Println(id) fmt.Println(retrievedRecord.ID, retrievedRecord.Email, retrievedRecord.LastLogin.Format(time.Stamp)) }
Output: id 316 jonas-90@tlaloc.com Nov 5 12:20:44
Index ¶
- Variables
- type Batch
- type BleveIndex
- type Collection
- func (c *Collection) Delete(id string) (err error)
- func (c *Collection) DeleteIndex(name string)
- func (c *Collection) Get(id string, dest interface{}) (contentAsBytes []byte, err error)
- func (c *Collection) GetBleveIndex(name string) (*BleveIndex, error)
- func (c *Collection) GetIterator() *CollectionIterator
- func (c *Collection) GetMulti(ids []string, destinations []interface{}) (contentsAsBytes [][]byte, err error)
- func (c *Collection) GetRevertedIterator() *CollectionIterator
- func (c *Collection) History(id string, limit int) (valuesAsBytes [][]byte, err error)
- func (c *Collection) NewBatch(ctx context.Context) (*Batch, error)
- func (c *Collection) Put(id string, content interface{}) error
- func (c *Collection) PutWithCleanHistory(id string, content interface{}) (err error)
- func (c *Collection) PutWithTTL(id string, content interface{}, ttl time.Duration) error
- func (c *Collection) Search(indexName string, query query.Query) (*SearchResult, error)
- func (c *Collection) SearchWithOptions(indexName string, searchRequest *bleve.SearchRequest) (*SearchResult, error)
- func (c *Collection) SetBleveIndex(name string, documentMapping *mapping.DocumentMapping) (err error)
- type CollectionIterator
- func (i CollectionIterator) Close()
- func (i *CollectionIterator) GetBytes() []byte
- func (i *CollectionIterator) GetID() string
- func (i *CollectionIterator) GetValue(dest interface{})
- func (i *CollectionIterator) Next()
- func (i *CollectionIterator) Seek(id string)
- func (i *CollectionIterator) Valid() bool
- type DB
- type FileIterator
- type FileMeta
- type FileStore
- func (fs *FileStore) DeleteFile(id string) (err error)
- func (fs *FileStore) GetFileIterator() *FileIterator
- func (fs *FileStore) GetFileReader(id string) (Reader, error)
- func (fs *FileStore) GetFileWriter(id, name string) (Writer, error)
- func (fs *FileStore) GetFileWriterRelated(id, name string, colName, documentID string) (Writer, error)
- func (fs *FileStore) GetFileWriterWithTTL(id, name string, ttl time.Duration) (Writer, error)
- func (fs *FileStore) PutFile(id string, name string, reader io.Reader) (n int, err error)
- func (fs *FileStore) PutFileRelated(id string, name string, reader io.Reader, colName, documentID string) (n int, err error)
- func (fs *FileStore) PutFileWithTTL(id string, name string, reader io.Reader, ttl time.Duration) (n int, err error)
- func (fs *FileStore) ReadFile(id string, writer io.Writer) error
- type Reader
- type Response
- type SearchResult
- type Writer
Examples ¶
Constants ¶
This section is empty.
Variables ¶
var (
	ErrNotFound                                = fmt.Errorf("not found")
	ErrHashCollision                           = fmt.Errorf("the name is in collision with an other element")
	ErrEmptyID                                 = fmt.Errorf("ID must be provided")
	ErrIndexNotFound                           = fmt.Errorf("index not found")
	ErrNameAllreadyExists                      = fmt.Errorf("element with the same name allready exists")
	ErrIndexAllreadyExistsWithDifferentMapping = fmt.Errorf("index with the same name allready exists with different mapping")
	ErrGetMultiNotEqual                        = fmt.Errorf("you must provied the same number of ids and destinations")
	ErrEndOfQueryResult                        = fmt.Errorf("there is no more values to retrieve from the query")
	ErrFileInWrite                             = fmt.Errorf("this file is already in write mode")
	ErrFileItemIteratorNotValid                = fmt.Errorf("item is not valid")
)
This defines most of the package errors
var (
	// FileChuckSize defines the default chunk size when saving files
	FileChuckSize = 5 * 1000 * 1000 // 5MB

	// ReaderWriterTimeout defines the default time before the file reader or writer
	// closes itself. The goal is to prevent having many readers or writers
	// left open by mistake.
	ReaderWriterTimeout = time.Minute * 10
)
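Because these are exported package-level variables, they can be tuned before the database is opened. A minimal sketch, where the 1MB chunk size and the two minute timeout are arbitrary example values:

package main

import (
	"log"
	"os"
	"time"

	"github.com/alexandrestein/gotinydb"
)

func main() {
	// Use smaller chunks and a shorter reader/writer timeout than the defaults.
	gotinydb.FileChuckSize = 1 * 1000 * 1000 // 1MB chunks
	gotinydb.ReaderWriterTimeout = time.Minute * 2

	var key [32]byte
	db, err := gotinydb.Open(os.TempDir()+"/tuned_example", key)
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()
}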
Functions ¶
This section is empty.
Types ¶
type Batch ¶ added in v0.3.1
type Batch struct {
// contains filtered or unexported fields
}
Batch is a simple struct to manage multiple writes in one commit
Example ¶
package main import ( "context" "fmt" "log" "time" "github.com/alexandrestein/gotinydb" ) type user struct { ID int Email string LastLogin time.Time } var ( testTime time.Time exampleCollection *gotinydb.Collection ) func main() { ctx, cancel := context.WithCancel(context.Background()) defer cancel() u1 := &user{ 2435, "user1@dom.com", testTime, } batch, err := exampleCollection.NewBatch(ctx) batch.Put("second write", u1) if err != nil { log.Fatal(err) } u2 := &user{ 56548, "user2@dom.com", testTime, } batch.Put("second write", u2) err = batch.Write() if err != nil { log.Fatal(err) } fmt.Println(err) }
Output: <nil>
func (*Batch) Delete ¶ added in v0.3.1
Delete adds a delete operation to the existing Batch pointer
func (*Batch) PutClean ¶ added in v0.3.1
PutClean adds a put operation to the existing Batch pointer and cleans the existing history of the given id
func (*Batch) PutWithTTL ¶ added in v0.4.1
PutWithTTL adds a put operation to the existing Batch pointer. The ttl parameter tells the database how long to keep the record. After the given duration the content and the ID are removed from the database and the history is cleaned.
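The batch methods above compose naturally with NewBatch and Write. The following is a minimal sketch only: the Delete, PutClean and PutWithTTL signatures are not listed in the index and are assumed here to mirror Put (an id, a content pointer, plus a time.Duration for PutWithTTL), and exampleCollection is assumed to be an already opened collection as in the Batch example.

package main

import (
	"context"
	"log"
	"time"

	"github.com/alexandrestein/gotinydb"
)

type user struct {
	ID        int
	Email     string
	LastLogin time.Time
}

var exampleCollection *gotinydb.Collection

func main() {
	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()

	batch, err := exampleCollection.NewBatch(ctx)
	if err != nil {
		log.Fatal(err)
	}

	// Queue several operations and commit them with a single Write call.
	batch.Put("user-1", &user{1, "one@dom.com", time.Now()})
	// Assumed signature: PutClean(id, content) drops the previous history of "user-2".
	batch.PutClean("user-2", &user{2, "two@dom.com", time.Now()})
	// Assumed signature: PutWithTTL(id, content, ttl) removes the record after one hour.
	batch.PutWithTTL("temporary", &user{3, "tmp@dom.com", time.Now()}, time.Hour)
	// Assumed signature: Delete(id) removes "old-user" as part of the same commit.
	batch.Delete("old-user")

	if err = batch.Write(); err != nil {
		log.Fatal(err)
	}
}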
type BleveIndex ¶ added in v0.3.0
type BleveIndex struct {
	// Signature provides a way to check if the index definition
	// has been updated since initialization.
	Signature [blake2b.Size256]byte

	Path string

	BleveIndexAsBytes []byte
	// contains filtered or unexported fields
}
BleveIndex defines the only supported index for now (no plans for others unless needed).
type Collection ¶
type Collection struct {
	// BleveIndexes is public for marshalling reasons and should never be used directly
	BleveIndexes []*BleveIndex
	// contains filtered or unexported fields
}
Collection defines the base element for saving objects. It holds the indexes and the values.
func (*Collection) Delete ¶
func (c *Collection) Delete(id string) (err error)
Delete deletes all references of the given id.
func (*Collection) DeleteIndex ¶
func (c *Collection) DeleteIndex(name string)
DeleteIndex deletes the index and all its references
func (*Collection) Get ¶
func (c *Collection) Get(id string, dest interface{}) (contentAsBytes []byte, err error)
Get returns the saved element. It fills up the given dest pointer if provided. It always returns the content as a stream of bytes and an error if any.
Example ¶
package main import ( "fmt" "time" "github.com/alexandrestein/gotinydb" ) var exampleCollection *gotinydb.Collection func main() { record := &struct { Name string }{ "testing name", } exampleCollection.Put("id", record) retrievedRecord := &struct { Name string }{} recordAsBytes, _ := exampleCollection.Get("id", retrievedRecord) fmt.Println(string(recordAsBytes)) fmt.Println(retrievedRecord) }
Output: {"Name":"testing name"} &{testing name}
func (*Collection) GetBleveIndex ¶ added in v0.3.0
func (c *Collection) GetBleveIndex(name string) (*BleveIndex, error)
GetBleveIndex gives an easy way to interact directly with bleve
func (*Collection) GetIterator ¶ added in v0.3.1
func (c *Collection) GetIterator() *CollectionIterator
GetIterator provides an easy way to list elements
func (*Collection) GetMulti ¶ added in v0.3.1
func (c *Collection) GetMulti(ids []string, destinations []interface{}) (contentsAsBytes [][]byte, err error)
GetMulti opens one Badger transaction and gets all documents concurrently
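A minimal sketch of GetMulti, assuming an already opened exampleCollection that contains the ids "id-1" and "id-2"; the ids and destinations slices must have the same length (see ErrGetMultiNotEqual).

package main

import (
	"fmt"
	"log"

	"github.com/alexandrestein/gotinydb"
)

var exampleCollection *gotinydb.Collection

func main() {
	type user struct{ Email string }

	// One destination pointer per requested id.
	u1, u2 := new(user), new(user)
	ids := []string{"id-1", "id-2"}
	destinations := []interface{}{u1, u2}

	contents, err := exampleCollection.GetMulti(ids, destinations)
	if err != nil {
		log.Fatal(err)
	}

	// contents holds the raw JSON of every document, in the same order as ids.
	for i, raw := range contents {
		fmt.Println(ids[i], string(raw), destinations[i])
	}
}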
func (*Collection) GetRevertedIterator ¶ added in v0.3.1
func (c *Collection) GetRevertedIterator() *CollectionIterator
GetRevertedIterator does the same as GetIterator but iterates in the opposite direction
func (*Collection) History ¶ added in v0.3.0
func (c *Collection) History(id string, limit int) (valuesAsBytes [][]byte, err error)
History returns the previous versions of the given id. The first value is the current one, and the further you go into the list, the older the records are.
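A minimal sketch of History, assuming an exampleCollection where the document "id" has been written several times; it asks for at most five revisions and prints them from newest to oldest.

package main

import (
	"fmt"
	"log"

	"github.com/alexandrestein/gotinydb"
)

var exampleCollection *gotinydb.Collection

func main() {
	// Ask for at most 5 versions of the document "id".
	versions, err := exampleCollection.History("id", 5)
	if err != nil {
		log.Fatal(err)
	}

	// versions[0] is the current value, the following entries get older and older.
	for i, raw := range versions {
		fmt.Println(i, string(raw))
	}
}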
func (*Collection) NewBatch ¶ added in v0.3.1
func (c *Collection) NewBatch(ctx context.Context) (*Batch, error)
NewBatch builds a new write transaction to perform all write operations in one commit
func (*Collection) Put ¶
func (c *Collection) Put(id string, content interface{}) error
Put sets a new element into the collection. If the content matches some of the indexes, it will be indexed.
Example ¶
package main import ( "fmt" "time" "github.com/alexandrestein/gotinydb" ) var exampleCollection *gotinydb.Collection func main() { record := &struct{}{} err := exampleCollection.Put("id", record) fmt.Println(err) }
Output: <nil>
func (*Collection) PutWithCleanHistory ¶ added in v0.3.0
func (c *Collection) PutWithCleanHistory(id string, content interface{}) (err error)
PutWithCleanHistory sets the content for the given id and cleans all previous records of this id
func (*Collection) PutWithTTL ¶ added in v0.4.1
func (c *Collection) PutWithTTL(id string, content interface{}, ttl time.Duration) error
PutWithTTL does the same as *Collection.Put but removes the content and its ID after the given duration
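A minimal sketch combining the two variants above, assuming an already opened exampleCollection; the ids, contents and the one hour TTL are arbitrary example values.

package main

import (
	"log"
	"time"

	"github.com/alexandrestein/gotinydb"
)

var exampleCollection *gotinydb.Collection

func main() {
	record := &struct{ Name string }{"clean me"}

	// Overwrite "id" and drop every previous version of it.
	if err := exampleCollection.PutWithCleanHistory("id", record); err != nil {
		log.Fatal(err)
	}

	// Store a record that the database removes by itself after one hour.
	session := &struct{ Token string }{"abc123"}
	if err := exampleCollection.PutWithTTL("session-id", session, time.Hour); err != nil {
		log.Fatal(err)
	}
}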
func (*Collection) Search ¶ added in v0.3.0
func (c *Collection) Search(indexName string, query query.Query) (*SearchResult, error)
Search performs a search with the default bleve search request (bleve.NewSearchRequest()) and returns a local SearchResult pointer
Example ¶
package main import ( "fmt" "log" "time" "github.com/alexandrestein/gotinydb" "github.com/blevesearch/bleve" ) var exampleCollection *gotinydb.Collection func main() { query := bleve.NewMatchQuery("example") response, err := exampleCollection.Search("index X", query) if err != nil { log.Fatal(err) } dest := &struct{ Name string }{} var resp *gotinydb.Response resp, err = response.NextResponse(dest) if err != nil { log.Fatal(err) } fmt.Println(dest) fmt.Println(resp.ID) fmt.Println(string(resp.Content)) fmt.Println(resp.DocumentMatch.Score) }
Output: &{I'm the example document} index X document {"Name":"I'm the example document"} 0.7071067690849304
func (*Collection) SearchWithOptions ¶ added in v0.3.0
func (c *Collection) SearchWithOptions(indexName string, searchRequest *bleve.SearchRequest) (*SearchResult, error)
SearchWithOptions does the same as *Collection.Search but you provide the searchRequest
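A minimal sketch of SearchWithOptions, assuming an exampleCollection with a bleve index named "email" as in the package example; it uses bleve.NewSearchRequestOptions to page through results ten at a time.

package main

import (
	"fmt"
	"log"

	"github.com/alexandrestein/gotinydb"
	"github.com/blevesearch/bleve"
)

var exampleCollection *gotinydb.Collection

func main() {
	query := bleve.NewQueryStringQuery("jonas-90@tlaloc.com")

	// Request the second page of 10 results, without score explanations.
	searchRequest := bleve.NewSearchRequestOptions(query, 10, 10, false)

	result, err := exampleCollection.SearchWithOptions("email", searchRequest)
	if err != nil {
		log.Fatal(err)
	}

	// The raw bleve result is still accessible for totals, facets, etc.
	fmt.Println(result.BleveSearchResult.Total)
}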
func (*Collection) SetBleveIndex ¶ added in v0.3.0
func (c *Collection) SetBleveIndex(name string, documentMapping *mapping.DocumentMapping) (err error)
SetBleveIndex adds a bleve index to the collection. It builds a new index with the given index mapping.
Example ¶
package main import ( "fmt" "time" "github.com/alexandrestein/gotinydb" "github.com/blevesearch/bleve" ) var exampleCollection *gotinydb.Collection func main() { // Build the index mapping (take a look at bleve) // This is a static mapping document to index only specified fields userDocumentMapping := bleve.NewDocumentStaticMapping() // Build the field checker emailFieldMapping := bleve.NewTextFieldMapping() // Add a text filed to Email property userDocumentMapping.AddFieldMappingsAt("Email", emailFieldMapping) err := exampleCollection.SetBleveIndex("your index name", userDocumentMapping) fmt.Println(err) }
Output: <nil>
type CollectionIterator ¶ added in v0.3.1
type CollectionIterator struct {
// contains filtered or unexported fields
}
CollectionIterator provides a nice way to list elements
Example ¶
package main import ( "fmt" "time" "github.com/alexandrestein/gotinydb" ) var exampleCollection *gotinydb.Collection func main() { iter := exampleCollection.GetIterator() defer iter.Close() for ; iter.Valid(); iter.Next() { fmt.Println(string(iter.GetBytes())) } }
Output: {"Name":"testing name"} {"Name":"I'm the example document"} {"ID":56548,"Email":"user2@dom.com","LastLogin":"2018-11-05T12:20:44.588809926+01:00"}
func (CollectionIterator) Close ¶ added in v0.3.1
func (i CollectionIterator) Close()
Close closes the current iterator and its related components. This method needs to be called once the iterator is no longer needed.
func (*CollectionIterator) GetBytes ¶ added in v0.3.1
func (i *CollectionIterator) GetBytes() []byte
GetBytes returns the document as a slice of bytes
func (*CollectionIterator) GetID ¶ added in v0.3.1
func (i *CollectionIterator) GetID() string
GetID returns the collection id of the current element
func (*CollectionIterator) GetValue ¶ added in v0.3.1
func (i *CollectionIterator) GetValue(dest interface{})
GetValue tries to fill up the dest pointer with the corresponding document
func (*CollectionIterator) Next ¶ added in v0.3.1
func (i *CollectionIterator) Next()
Next moves the cursor to the next position. If the iterator is in regular mode, it moves to the smallest key bigger than the current one. If the iterator is in reverted mode, it moves to the biggest key smaller than the current one.
func (*CollectionIterator) Seek ¶ added in v0.3.1
func (i *CollectionIterator) Seek(id string)
Seek moves to the provided key if present. If absent, it moves to the next smallest key greater than the provided one when iterating in the forward direction. Behavior is reversed when iterating backwards.
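A minimal sketch combining Seek with the other iterator methods, assuming an exampleCollection whose document ids sort lexicographically; it starts at (or just after) "id" and decodes each document with GetValue.

package main

import (
	"fmt"

	"github.com/alexandrestein/gotinydb"
)

var exampleCollection *gotinydb.Collection

func main() {
	iter := exampleCollection.GetIterator()
	defer iter.Close()

	// Jump to "id", or to the next bigger key if "id" is absent.
	iter.Seek("id")

	for ; iter.Valid(); iter.Next() {
		dest := &struct{ Name string }{}
		iter.GetValue(dest)
		fmt.Println(iter.GetID(), dest.Name)
	}
}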
func (*CollectionIterator) Valid ¶ added in v0.3.1
func (i *CollectionIterator) Valid() bool
Valid returns true if the cursor is still on a valid value. It returns false when the iteration is done.
type DB ¶
type DB struct {
	// PrivateKey is public for marshaling reasons and should never be used or changed.
	// This is the primary key used to derive every record.
	PrivateKey [32]byte

	// Collections is public for marshaling reasons and should never be used directly.
	// It contains the collection pointers used to manage the documents.
	Collections []*Collection

	// FileStore provides all accessibility to the file storage facilities
	FileStore *FileStore
	// contains filtered or unexported fields
}
DB is the base struct of the package. It provides the collections and manages all writes to the database.
func Open ¶
Open initializes a new database or opens an existing one. The path defines where the data is saved, and the configuration key is used to decrypt an existing configuration and to encrypt a new one.
Example ¶
package main import ( "fmt" "log" "time" "github.com/alexandrestein/gotinydb" ) var dbKey [32]byte func main() { // Open or create the database at the given path and with the given encryption key db, err := gotinydb.Open("path_to_database_directory", dbKey) if err != nil { log.Fatal(err) } // Remumber to close the database err = db.Close() fmt.Println(err) }
Output: <nil>
func (*DB) Backup ¶
Backup performs a full backup of the database. It fills up the io.Writer with all data, indexes, and configurations.
func (*DB) DeleteCollection ¶
DeleteCollection removes every document and index, and the collection itself
func (*DB) GarbageCollection ¶ added in v0.6.1
GarbageCollection provides access to the garbage collection of the underlying database storage (Badger).
RunValueLogGC triggers a value log garbage collection.
It picks value log files to perform GC based on statistics that are collected during compactions. If no such statistics are available, then log files are picked in random order. The process stops as soon as the first log file is encountered which does not result in garbage collection.

When a log file is picked, it is first sampled. If the sample shows that we can discard at least discardRatio space of that file, it would be rewritten. If a call to RunValueLogGC results in no rewrites, then an ErrNoRewrite is thrown indicating that the call resulted in no file rewrites. We recommend setting discardRatio to 0.5, thus indicating that a file be rewritten if half the space can be discarded. This results in a lifetime value log write amplification of 2 (1 from original write + 0.5 rewrite + 0.25 + 0.125 + ... = 2). Setting it to a higher value would result in fewer space reclaims, while setting it to a lower value would result in more space reclaims at the cost of increased activity on the LSM tree. discardRatio must be in the range (0.0, 1.0), both endpoints excluded, otherwise an ErrInvalidRequest is returned.

Only one GC is allowed at a time. If another value log GC is running, or the DB has been closed, this would return an ErrRejected.

Note: Every time GC is run, it would produce a spike of activity on the LSM tree.
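A minimal sketch only: the method signature is not shown above, and this assumes GarbageCollection takes a discardRatio float64 and returns an error, mirroring Badger's RunValueLogGC; check the package source before relying on it. Running it periodically from a ticker is a common pattern.

package main

import (
	"log"
	"time"

	"github.com/alexandrestein/gotinydb"
)

var exampleDB *gotinydb.DB

func main() {
	// Assumed signature: GarbageCollection(discardRatio float64) error.
	ticker := time.NewTicker(10 * time.Minute)
	defer ticker.Stop()

	for range ticker.C {
		// 0.5 rewrites a value log file when at least half of it can be discarded.
		if err := exampleDB.GarbageCollection(0.5); err != nil {
			log.Println("value log GC:", err)
		}
	}
}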
func (*DB) Use ¶
func (d *DB) Use(colName string) (col *Collection, err error)
Use builds a new collection or opens an existing one.
Example ¶
package main import ( "fmt" "log" "time" "github.com/alexandrestein/gotinydb" ) var exampleDB *gotinydb.DB func main() { // Open a collection col, err := exampleDB.Use("collection name") if err != nil { log.Fatal(err) } fmt.Println(col.Name) fmt.Println(col.Prefix) }
Output: collection name [1 20 101]
type FileIterator ¶ added in v0.3.1
type FileIterator struct {
// contains filtered or unexported fields
}
FileIterator provides easy access to all written files
Example ¶
package main import ( "fmt" "time" "github.com/alexandrestein/gotinydb" ) var exampleDB *gotinydb.DB func main() { iter := exampleDB.FileStore.GetFileIterator() defer iter.Close() for ; iter.Valid(); iter.Next() { meta := iter.GetMeta() fmt.Println(meta.ID, meta.Name) } }
Output: read file txt file example test.txt
func (FileIterator) Close ¶ added in v0.3.1
func (i FileIterator) Close()
Close closes the current iterator and its related components. This method needs to be called once the iterator is no longer needed.
func (*FileIterator) GetMeta ¶ added in v0.3.1
func (i *FileIterator) GetMeta() *FileMeta
GetMeta returns the metadata at the current cursor position
func (*FileIterator) Next ¶ added in v0.3.1
func (i *FileIterator) Next() error
Next moves to the next valid metadata element
func (*FileIterator) Seek ¶ added in v0.3.1
func (i *FileIterator) Seek(id string)
Seek moves to the metadata corresponding to the given id
func (*FileIterator) Valid ¶ added in v0.3.1
func (i *FileIterator) Valid() bool
Valid checks if the cursor points to a valid metadata document
type FileMeta ¶ added in v0.3.1
type FileMeta struct {
	ID                        string
	Name                      string
	Size                      int64
	LastModified              time.Time
	ChuckSize                 int
	RelatedDocumentID         string
	RelatedDocumentCollection string
	// contains filtered or unexported fields
}
FileMeta defines some file metadata information
type FileStore ¶ added in v0.5.0
type FileStore struct {
// contains filtered or unexported fields
}
FileStore defines the database file storage object
func (*FileStore) DeleteFile ¶ added in v0.5.0
DeleteFile deletes every chunk of the given file ID
func (*FileStore) GetFileIterator ¶ added in v0.5.0
func (fs *FileStore) GetFileIterator() *FileIterator
GetFileIterator returns a file iterator which helps to list existing files
func (*FileStore) GetFileReader ¶ added in v0.5.0
GetFileReader returns a struct that provides simple partial reading of big files. The default position is at the beginning of the file.
func (*FileStore) GetFileWriter ¶ added in v0.5.0
GetFileWriter returns a struct that provides simple partial writing of big files. The default position is at the end of the file.
func (*FileStore) GetFileWriterRelated ¶ added in v0.5.0
func (fs *FileStore) GetFileWriterRelated(id, name string, colName, documentID string) (Writer, error)
GetFileWriterRelated does the same as GetFileWriter but with a related document
func (*FileStore) GetFileWriterWithTTL ¶ added in v0.5.0
GetFileWriterWithTTL does the same as GetFileWriter but the file is automatically removed after the given duration
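A minimal sketch of the TTL writer, assuming an already opened exampleDB; the id, file name, content and the 24 hour TTL are arbitrary example values.

package main

import (
	"log"
	"time"

	"github.com/alexandrestein/gotinydb"
)

var exampleDB *gotinydb.DB

func main() {
	// The whole file is removed automatically 24 hours after being written.
	writer, err := exampleDB.FileStore.GetFileWriterWithTTL("temp report", "report.txt", time.Hour*24)
	if err != nil {
		log.Fatal(err)
	}
	defer writer.Close()

	if _, err = writer.Write([]byte("this content expires in a day")); err != nil {
		log.Fatal(err)
	}
}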
func (*FileStore) PutFile ¶ added in v0.5.0
PutFile lets the caller insert large elements into the database via a reader interface
func (*FileStore) PutFileRelated ¶ added in v0.5.0
func (fs *FileStore) PutFileRelated(id string, name string, reader io.Reader, colName, documentID string) (n int, err error)
PutFileRelated does the same as *FileStore.PutFile but the file is automatically removed when the related document is removed. A typical use case is a blog post: posts contain images and media, and once a post is removed its images and media are no longer needed. This provides an easy way to remove files automatically based on collection documents.
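A minimal sketch of PutFileRelated and ReadFile, assuming an already opened exampleDB with a collection named "posts" that contains a document "post-1"; the ids, names and content are illustrative.

package main

import (
	"bytes"
	"log"
	"os"

	"github.com/alexandrestein/gotinydb"
)

var exampleDB *gotinydb.DB

func main() {
	image := bytes.NewReader([]byte("fake image bytes"))

	// Tie the file to the document "post-1" of the "posts" collection:
	// deleting the document also deletes the file.
	_, err := exampleDB.FileStore.PutFileRelated("post-1-cover", "cover.png", image, "posts", "post-1")
	if err != nil {
		log.Fatal(err)
	}

	// Stream the stored file back into any io.Writer.
	if err = exampleDB.FileStore.ReadFile("post-1-cover", os.Stdout); err != nil {
		log.Fatal(err)
	}
}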
type Reader ¶ added in v0.3.1
Reader defines a simple object to read parts of a file. After 10 minutes (the ReaderWriterTimeout variable) the reader is automatically closed.
Example ¶
package main import ( "fmt" "io" "log" "time" "github.com/alexandrestein/gotinydb" ) var exampleDB *gotinydb.DB func main() { reader, err := exampleDB.FileStore.GetFileReader("read file") if err != nil { log.Fatal(err) } defer reader.Close() readBuffer := make([]byte, 100) n := 0 n, err = reader.ReadAt(readBuffer, 25) if err != nil && err != io.EOF { log.Fatal(err) } readBuffer = readBuffer[:n] fmt.Println(n, string(readBuffer)) }
Output: 39 GFvOPRqvWxfHUmAFAksELPTpV0lmPvwjMwdqq5i
type Response ¶
type Response struct {
	ID            string
	Content       []byte
	DocumentMatch *search.DocumentMatch
}
Response is returned by *SearchResult.NextResponse when the caller needs access to the byte stream
type SearchResult ¶ added in v0.3.0
type SearchResult struct {
	BleveSearchResult *bleve.SearchResult
	// contains filtered or unexported fields
}
SearchResult is returned by *Collection.Search or *Collection.SearchWithOptions. It provides an easy listing of the results.
func (*SearchResult) Next ¶ added in v0.3.0
func (s *SearchResult) Next(dest interface{}) (id string, err error)
Next fills up the destination by unmarshaling the saved byte stream. It returns the corresponding id of the element and an error if any.
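A minimal sketch that drains a SearchResult with Next, assuming an exampleCollection with the "email" index from the package example; iteration stops when ErrEndOfQueryResult is returned.

package main

import (
	"fmt"
	"log"

	"github.com/alexandrestein/gotinydb"
	"github.com/blevesearch/bleve"
)

var exampleCollection *gotinydb.Collection

func main() {
	query := bleve.NewQueryStringQuery("example")
	result, err := exampleCollection.Search("email", query)
	if err != nil {
		log.Fatal(err)
	}

	// Loop until every match has been consumed.
	for {
		dest := &struct{ Name string }{}
		id, err := result.Next(dest)
		if err == gotinydb.ErrEndOfQueryResult {
			break
		}
		if err != nil {
			log.Fatal(err)
		}
		fmt.Println(id, dest.Name)
	}
}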
func (*SearchResult) NextResponse ¶ added in v0.3.0
func (s *SearchResult) NextResponse(dest interface{}) (resp *Response, _ error)
NextResponse fills up the destination by unmarshaling the saved byte stream. It returns the byte stream and the id of the document inside a Response pointer, or an error if any.
type Writer ¶ added in v0.3.1
Writer defines a simple object to write parts of a file. After 10 minutes (the ReaderWriterTimeout variable) the writer is automatically closed.
Example ¶
package main import ( "fmt" "io" "log" "time" "github.com/alexandrestein/gotinydb" ) var exampleDB *gotinydb.DB func main() { writer, err := exampleDB.FileStore.GetFileWriter("file example", "test.txt") if err != nil { log.Fatal(err) } defer writer.Close() n := 0 writtenBytes := 0 n, err = writer.Write([]byte("this is a text file")) if err != nil { log.Fatal(err) } writtenBytes += n n, err = writer.Write([]byte("\n")) if err != nil { log.Fatal(err) } writtenBytes += n n, err = writer.Write([]byte("and then the second is written")) if err != nil { log.Fatal(err) } writtenBytes += n fmt.Println("writtenBytes", writtenBytes) readBuff := make([]byte, 1000) writer.Seek(0, io.SeekStart) n, err = writer.Read(readBuff) if err != nil && err != io.EOF { log.Fatal(err) } fmt.Println(n, string(readBuff[:n])) }
Output: writtenBytes 50 50 this is a text file and then the second is written