Skip to content

Commit

Permalink
Save library entry data when updating the document ID
Browse files Browse the repository at this point in the history
  • Loading branch information
kaki-admin committed May 6, 2024
1 parent 0693ef8 commit c53f301
Show file tree
Hide file tree
Showing 3 changed files with 3 additions and 4 deletions.
2 changes: 1 addition & 1 deletion backend-server/api/entry.go
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,7 @@ func (h *handler) newFetchContent(entry *model.Entry) string {
feedSearchRSSList = append(feedSearchRSSList, feedNotification)
}
}
entry = crawler.EntryCrawler(entry, feedUrl, userAgent, cookie, certificates, fetchViaProxy)
crawler.EntryCrawler(entry, feedUrl, userAgent, cookie, certificates, fetchViaProxy)

notificationData := model.NotificationData{
Name: entry.Title,
Expand Down
3 changes: 1 addition & 2 deletions backend-server/crawler/entry.go
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ import (
"go.uber.org/zap"
)

func EntryCrawler(entry *model.Entry, feedUrl, userAgent, cookie string, certificates, fetchViaProxy bool) *model.Entry {
func EntryCrawler(entry *model.Entry, feedUrl, userAgent, cookie string, certificates, fetchViaProxy bool) {
//entryID, entryUrl, entryTitle, imageUrl, author string, entryPublishedAt int64, feed *model.Feed) (string, string, int64) {

entry.RawContent = fetchRawContnt(
Expand Down Expand Up @@ -65,7 +65,6 @@ func EntryCrawler(entry *model.Entry, feedUrl, userAgent, cookie string, certifi
} else {
common.Logger.Error("crawler raw content is null", zap.String("url", entry.URL))
}
return entry
//return rawContent, rtContent, entryPublishedAt
}

Expand Down
2 changes: 1 addition & 1 deletion backend-server/storage/entry.go
Original file line number Diff line number Diff line change
Expand Up @@ -96,7 +96,7 @@ func (s *Storage) UpdateEntryContent(entry *model.Entry) error {
func (s *Storage) UpdateEntryDocID(entry *model.Entry) error {
coll := getEntryMongodbColl(s)
filter := bson.M{"_id": entry.ID}
update := bson.M{"$set": bson.M{"doc_id": entry.DocId}}
update := bson.M{"$set": bson.M{"crawler": true, "language": entry.Language, "author": entry.Author, "title": entry.Title, "raw_content": entry.RawContent, "full_content": entry.FullContent, "doc_id": entry.DocId}}

if _, err := coll.UpdateOne(context.TODO(), filter, update); err != nil {
common.Logger.Error("unable to update entry", zap.String("url", entry.ID.Hex()), zap.Error(err))
Expand Down

0 comments on commit c53f301

Please sign in to comment.