Finish the page-handler.

Remove unneeded utils.
Migrate ETag calculation code.
Captain ALM 2022-07-15 12:18:33 +01:00
parent 98dac85215
commit 42dd0259af
Signed by: alfred
GPG Key ID: 4E4ADD02609997B1
4 changed files with 195 additions and 36 deletions

@@ -2,25 +2,37 @@ package pageHandler
import (
"golang.captainalm.com/cityuni-webserver/conf"
"golang.captainalm.com/cityuni-webserver/pageHandler/utils"
"io"
"mime/multipart"
"net/http"
"net/textproto"
"net/url"
"strconv"
"strings"
"sync"
"time"
)
type PageHandler struct {
-PageContentsCache map[string][]byte
+PageContentsCache map[string]*CachedPage
PageProviders map[string]PageProvider
pageContentsCacheRWMutex *sync.RWMutex
RangeSupported bool
CacheSettings conf.CacheSettingsYaml
}
type CachedPage struct {
Content []byte
ContentType string
LastMod time.Time
}
func NewPageHandler(config conf.ServeYaml) *PageHandler {
-var thePCCMap map[string][]byte
+var thePCCMap map[string]*CachedPage
var theMutex *sync.RWMutex
if config.CacheSettings.EnableContentsCaching {
-thePCCMap = make(map[string][]byte)
+thePCCMap = make(map[string]*CachedPage)
theMutex = &sync.RWMutex{}
}
return &PageHandler{
@@ -33,19 +45,127 @@ func NewPageHandler(config conf.ServeYaml) *PageHandler {
}
func (ph *PageHandler) ServeHTTP(writer http.ResponseWriter, request *http.Request) {
//Provide processing for requests using providers
actualPagePath := strings.TrimRight(request.URL.Path, "/")
queryCollection, actualQueries := ph.GetCleanQuery(request)
var pageContent []byte
var pageContentType string
var lastMod time.Time
if ph.CacheSettings.EnableContentsCaching {
cached := ph.getPageFromCache(request.URL, actualQueries)
if cached != nil {
pageContent = cached.Content
pageContentType = cached.ContentType
lastMod = cached.LastMod
}
}
if pageContentType == "" {
if provider := ph.PageProviders[actualPagePath]; provider != nil {
pageContentType, pageContent = provider.GetContents(queryCollection)
lastMod = provider.GetLastModified()
if pageContentType != "" && ph.CacheSettings.EnableContentsCaching {
ph.setPageToCache(request.URL, actualQueries, &CachedPage{
Content: pageContent,
ContentType: pageContentType,
LastMod: lastMod,
})
}
}
}
allowedMethods := ph.getAllowedMethodsForPath(request.URL.Path)
allowed := false
if request.Method != http.MethodOptions {
for _, method := range allowedMethods {
if method == request.Method {
allowed = true
break
}
}
}
if allowed {
if pageContentType == "" {
utils.WriteResponseHeaderCanWriteBody(request.Method, writer, http.StatusNotFound, "Page Not Found")
} else {
switch request.Method {
case http.MethodGet, http.MethodHead:
writer.Header().Set("Content-Type", pageContentType)
writer.Header().Set("Content-Length", strconv.Itoa(len(pageContent)))
utils.SetLastModifiedHeader(writer.Header(), lastMod)
utils.SetCacheHeaderWithAge(writer.Header(), ph.CacheSettings.MaxAge, lastMod)
theETag := utils.GetValueForETagUsingByteArray(pageContent)
writer.Header().Set("ETag", theETag)
if utils.ProcessSupportedPreconditionsForNext(writer, request, lastMod, theETag, ph.CacheSettings.NotModifiedResponseUsingLastModified, ph.CacheSettings.NotModifiedResponseUsingETags) {
httpRangeParts := utils.ProcessRangePreconditions(int64(len(pageContent)), writer, request, lastMod, theETag, ph.RangeSupported)
if httpRangeParts != nil {
if len(httpRangeParts) <= 1 {
var theWriter io.Writer = writer
if len(httpRangeParts) == 1 {
theWriter = utils.NewPartialRangeWriter(theWriter, httpRangeParts[0])
}
_, _ = theWriter.Write(pageContent)
} else {
multWriter := multipart.NewWriter(writer)
writer.Header().Set("Content-Type", "multipart/byteranges; boundary="+multWriter.Boundary())
for _, currentPart := range httpRangeParts {
mimePart, err := multWriter.CreatePart(textproto.MIMEHeader{
"Content-Range": {currentPart.ToField(int64(len(pageContent)))},
"Content-Type": {"text/plain; charset=utf-8"},
})
if err != nil {
break
}
_, err = mimePart.Write(pageContent[currentPart.Start : currentPart.Start+currentPart.Length])
if err != nil {
break
}
}
_ = multWriter.Close()
}
}
}
case http.MethodDelete:
ph.PurgeTemplateCache(actualPagePath)
ph.PurgeContentsCache(request.URL.Path, actualQueries)
utils.SetNeverCacheHeader(writer.Header())
utils.WriteResponseHeaderCanWriteBody(request.Method, writer, http.StatusOK, "")
}
}
} else {
theAllowHeaderContents := ""
for _, method := range allowedMethods {
theAllowHeaderContents += method + ", "
}
writer.Header().Set("Allow", strings.TrimSuffix(theAllowHeaderContents, ", "))
if request.Method == http.MethodOptions {
utils.WriteResponseHeaderCanWriteBody(request.Method, writer, http.StatusOK, "")
} else {
utils.WriteResponseHeaderCanWriteBody(request.Method, writer, http.StatusMethodNotAllowed, "")
}
}
}
-func (ph *PageHandler) GetCleanQuery(request *http.Request) url.Values {
+func (ph *PageHandler) GetCleanQuery(request *http.Request) (url.Values, string) {
toClean := request.URL.Query()
provider := ph.PageProviders[request.URL.Path]
if provider == nil {
-return make(url.Values)
+return make(url.Values), ""
}
supportedKeys := provider.GetSupportedURLParameters()
toDelete := make([]string, len(toClean))
theSize := 0
-for s := range toClean {
+theQuery := ""
+for s, v := range toClean {
noExist := true
for _, key := range supportedKeys {
if s == key {
@@ -56,19 +176,27 @@ func (ph *PageHandler) GetCleanQuery(request *http.Request) url.Values {
if noExist {
toDelete[theSize] = s
theSize++
} else {
for _, i := range v {
if i == "" {
theQuery += s + "&"
} else {
theQuery += s + "=" + i + "&"
}
}
}
}
for i := 0; i < theSize; i++ {
delete(toClean, toDelete[i])
}
-return toClean
+return toClean, strings.TrimRight(theQuery, "&")
}
func (ph *PageHandler) PurgeContentsCache(path string, query string) {
-if ph.CacheSettings.EnableContentsCaching {
+if ph.CacheSettings.EnableContentsCaching && ph.CacheSettings.EnableContentsCachePurge {
if path == "" {
ph.pageContentsCacheRWMutex.Lock()
-ph.PageContentsCache = make(map[string][]byte)
+ph.PageContentsCache = make(map[string]*CachedPage)
ph.pageContentsCacheRWMutex.Unlock()
} else {
if strings.HasSuffix(path, "/") {
@@ -114,3 +242,47 @@ func (ph *PageHandler) PurgeTemplateCache(path string) {
}
}
}
func (ph *PageHandler) getPageFromCache(urlIn *url.URL, cleanedQueries string) *CachedPage {
ph.pageContentsCacheRWMutex.RLock()
defer ph.pageContentsCacheRWMutex.RUnlock()
if strings.HasSuffix(urlIn.Path, "/") {
return ph.PageContentsCache[strings.TrimRight(urlIn.Path, "/")]
} else {
if cleanedQueries == "" {
return ph.PageContentsCache[urlIn.Path]
} else {
return ph.PageContentsCache[urlIn.Path+"?"+cleanedQueries]
}
}
}
func (ph *PageHandler) setPageToCache(urlIn *url.URL, cleanedQueries string, newPage *CachedPage) {
ph.pageContentsCacheRWMutex.Lock()
defer ph.pageContentsCacheRWMutex.Unlock()
if strings.HasSuffix(urlIn.Path, "/") {
ph.PageContentsCache[strings.TrimRight(urlIn.Path, "/")] = newPage
} else {
if cleanedQueries == "" {
ph.PageContentsCache[urlIn.Path] = newPage
} else {
ph.PageContentsCache[urlIn.Path+"?"+cleanedQueries] = newPage
}
}
}
func (ph *PageHandler) getAllowedMethodsForPath(pathIn string) []string {
if strings.HasSuffix(pathIn, "/") {
if (ph.CacheSettings.EnableTemplateCaching && ph.CacheSettings.EnableTemplateCachePurge) ||
(ph.CacheSettings.EnableContentsCaching && ph.CacheSettings.EnableContentsCachePurge) {
return []string{http.MethodHead, http.MethodGet, http.MethodOptions, http.MethodDelete}
} else {
return []string{http.MethodHead, http.MethodGet, http.MethodOptions}
}
} else {
if ph.CacheSettings.EnableContentsCaching && ph.CacheSettings.EnableContentsCachePurge {
return []string{http.MethodHead, http.MethodGet, http.MethodOptions, http.MethodDelete}
} else {
return []string{http.MethodHead, http.MethodGet, http.MethodOptions}
}
}
}
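
For context, a minimal sketch of how the reworked handler could be mounted with net/http. Only NewPageHandler, ServeHTTP, the CacheSettings fields, and the PageProviders map come from the diff above; building the config inline (rather than loading it from YAML), the example port, and the commented provider registration are assumptions.

package main

import (
	"net/http"

	"golang.captainalm.com/cityuni-webserver/conf"
	"golang.captainalm.com/cityuni-webserver/pageHandler"
)

func main() {
	// Assumed: conf.ServeYaml built inline; only EnableContentsCaching is
	// known to be a bool from the handler code above.
	cfg := conf.ServeYaml{
		CacheSettings: conf.CacheSettingsYaml{
			EnableContentsCaching: true,
		},
	}
	ph := pageHandler.NewPageHandler(cfg)
	// Hypothetical registration; the diff only shows PageProviders being
	// read, keyed by the trimmed request path.
	// ph.PageProviders["/hello"] = &myProvider{}
	_ = http.ListenAndServe(":8080", ph) // *PageHandler satisfies http.Handler via ServeHTTP
}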

@@ -1,10 +1,14 @@
package pageHandler
import "net/url"
import (
"net/url"
"time"
)
type PageProvider interface {
GetPath() string
GetSupportedURLParameters() []string
GetLastModified() time.Time
GetContents(urlParameters url.Values) (contentType string, contents []byte)
PurgeTemplate()
}
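
As an illustration only (not part of this commit), a type satisfying the updated interface, including the new GetLastModified method, might look like the sketch below; the helloProvider name and its contents are hypothetical.

package pageHandler

import (
	"net/url"
	"time"
)

// helloProvider is a hypothetical example used only to illustrate the interface.
type helloProvider struct {
	lastMod time.Time
}

func (h *helloProvider) GetPath() string                     { return "/hello" }
func (h *helloProvider) GetSupportedURLParameters() []string { return []string{"name"} }
func (h *helloProvider) GetLastModified() time.Time          { return h.lastMod }
func (h *helloProvider) PurgeTemplate()                      {}

func (h *helloProvider) GetContents(urlParameters url.Values) (contentType string, contents []byte) {
	name := urlParameters.Get("name")
	if name == "" {
		name = "world"
	}
	return "text/plain; charset=utf-8", []byte("Hello, " + name + "!")
}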

@@ -1,23 +0,0 @@
package utils
import (
"crypto"
"encoding/hex"
)
type BufferedWriter struct {
Data []byte
}
func (c *BufferedWriter) Write(p []byte) (n int, err error) {
c.Data = append(c.Data, p...)
return len(p), nil
}
func (c *BufferedWriter) GetHashString() string {
theHash := crypto.SHA1.New()
_, _ = theHash.Write(c.Data)
theSum := theHash.Sum(nil)
theHash.Reset()
return hex.EncodeToString(theSum)
}

@@ -1,11 +1,17 @@
package utils
import (
"crypto"
"encoding/hex"
"strings"
)
-func GetValueForETagUsingBufferedWriter(bWriter *BufferedWriter) string {
-return "\"" + bWriter.GetHashString() + "\""
+func GetValueForETagUsingByteArray(b []byte) string {
+theHash := crypto.SHA1.New()
+_, _ = theHash.Write(b)
+theSum := theHash.Sum(nil)
+theHash.Reset()
+return "\"" + hex.EncodeToString(theSum) + "\""
}
func GetETagValues(stringIn string) []string {
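
Finally, a rough usage sketch of the migrated helper. GetValueForETagUsingByteArray comes from the diff above; the surrounding handler is hypothetical, and GetETagValues is assumed to split a conditional header such as If-None-Match into its individual ETag strings.

package utils

import "net/http"

// writeWithETag is a hypothetical helper: it tags the response with the
// quoted SHA-1 ETag of the body and answers 304 Not Modified when the
// client already holds a matching ETag.
func writeWithETag(w http.ResponseWriter, r *http.Request, body []byte) {
	theETag := GetValueForETagUsingByteArray(body)
	w.Header().Set("ETag", theETag)
	for _, candidate := range GetETagValues(r.Header.Get("If-None-Match")) {
		if candidate == theETag {
			w.WriteHeader(http.StatusNotModified)
			return
		}
	}
	_, _ = w.Write(body)
}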