Requirement
The back-end template files reference front-end resources; at publish time, a Python script automatically replaces the referenced resource names with the newly built (hashed) versions.
Preparation
The front end is built with Webpack; each time a build completes, a buildfile.json manifest is generated.
buildfile.json
{
  "about.css": "about-ca3f85cec35d3ab39ac0.css",
  "about.js": "about-613281148cb8885f2b3d.js",
  "home.css": "home-94093b44a5a85be01f38.css",
  "home.js": "home-6dd4e116818ee4945f65.js"
}
Read the front-end build file
import os
import json

BUILD_FILES = {}


def get_build_files():
    """Load the Webpack build manifest (buildfile.json).

    Returns:
        dict: mapping of logical asset name -> hashed file name
        (e.g. 'about.js' -> 'about-613281148cb8885f2b3d.js'),
        or an empty dict when the manifest does not exist.
    """
    build_file_path = '/Users/pengjie/try/iseo2/dist/buildfile.json'
    # Guard against running before the front-end build has produced the manifest.
    if os.path.exists(build_file_path):
        with open(build_file_path, 'r') as f:
            return json.load(f)
    print('build file not exists.')
    # Return an empty dict (not '') so callers can still use .get() safely;
    # both values are falsy, so truthiness checks behave the same.
    return {}


BUILD_FILES = get_build_files()
Get all template files
def get_template_files():
    """Collect every template file under the views directory.

    Walks the directory tree recursively, skipping hidden files
    (names starting with '.'), and returns a list of full paths.
    """
    template_path = '/Users/pengjie/try/iseo2/tmpviews'
    return [
        os.path.join(dirpath, name)
        for dirpath, _dirnames, filenames in os.walk(template_path)
        for name in filenames
        if not name.startswith('.')
    ]
Get all scripts and links to be replaced
import os
import re
from bs4 import BeautifulSoup


def get_replace_list(content):
    """Build the list of asset-name replacements for one HTML template.

    Scans *content* for <link href=...> and <script src=...> tags, strips the
    content-hash segment from each file name (e.g. 'about-613281...js' ->
    'about.js') and looks up the current hashed name in BUILD_FILES.

    Args:
        content: the template's HTML source as a string.
    Returns:
        list[dict]: items of the form {'old': stale_name, 'new': current_name};
        'new' is '' when the key is not present in BUILD_FILES.
    """
    replace_list = []
    soup = BeautifulSoup(content, 'html.parser')
    # <link> tags carry their asset URL in href, <script> tags in src;
    # handle both with one loop instead of two duplicated blocks.
    for tag_name, attr in (('link', 'href'), ('script', 'src')):
        for item in soup.find_all(tag_name):
            url = item.get(attr)
            if not url:
                # Inline <script> blocks / malformed tags have no URL attribute;
                # os.path.basename(None) would raise TypeError.
                continue
            name = os.path.basename(url)
            # The '-<hash>.' segment sits between base name and extension.
            matches = re.search(r'-(.+)?(\.)', name, re.I)
            if matches is None:
                # Not a hashed asset name; nothing to replace.
                continue
            key = name.replace(matches[0], '.')
            replace_list.append({'old': name, 'new': BUILD_FILES.get(key, '')})
    return replace_list
Find and replace
def search_and_replace(tplpath):
    """Rewrite one template in place, swapping stale asset names for current ones."""
    with open(tplpath, 'r', encoding="utf-8") as f:
        content = f.read()
    # Apply a substitution only when a current build entry was found.
    for item in get_replace_list(content):
        old, new = item['old'], item['new']
        if old and new:
            content = content.replace(old, new)
    with open(tplpath, "w", encoding="utf-8") as f:
        f.write(content)
Full script content
import os
import re
import json

from bs4 import BeautifulSoup

# Mapping of logical asset name -> current hashed file name, loaded from
# buildfile.json at startup.
BUILD_FILES = {}


def run():
    """Walk every template under the views directory and update asset names."""
    view_path = '/Users/pengjie/try/iseo2/tmpviews'
    for dirpath, dirnames, filenames in os.walk(view_path):
        for filename in filenames:
            # Skip hidden files such as .DS_Store.
            if filename[0] != '.':
                filepath = os.path.join(dirpath, filename)
                # Read the file, get the replacement list
                with open(filepath, 'r', encoding="utf-8") as f:
                    content = f.read()
                for item in get_replace_list(content):
                    if item['old'] and item['new']:
                        content = content.replace(item['old'], item['new'])
                with open(filepath, "w", encoding="utf-8") as f:
                    f.write(content)
                print('----')


# Get replacement list
def get_replace_list(content):
    """Return [{'old': stale_name, 'new': current_name}] found in HTML content."""
    replace_list = []
    soup = BeautifulSoup(content, 'html.parser')
    # <link> assets live in href, <script> assets in src; one loop covers
    # both instead of duplicating the parsing logic per tag type.
    for tag_name, attr in (('link', 'href'), ('script', 'src')):
        for item in soup.find_all(tag_name):
            url = item.get(attr)
            if not url:
                continue  # inline <script> / tag without a URL attribute
            name = os.path.basename(url)
            # '-<hash>.' separates the base name from the extension.
            matches = re.search(r'-(.+)?(\.)', name, re.I)
            if matches is None:
                continue  # file name carries no hash segment
            key = name.replace(matches[0], '.')
            replace_list.append({'old': name, 'new': BUILD_FILES.get(key, '')})
    return replace_list


# Get build file
def get_build_file():
    """Load buildfile.json; return an empty (falsy) dict when it is missing."""
    build_file_path = '/Users/pengjie/try/iseo2/dist/buildfile.json'
    if os.path.exists(build_file_path):
        with open(build_file_path, 'r') as f:
            return json.load(f)
    print('build file not exists.')
    # {} keeps the `if(BUILD_FILES)` guard working and, unlike '', supports
    # BUILD_FILES.get(...) if callers reach it anyway.
    return {}


if __name__ == '__main__':
    BUILD_FILES = get_build_file()
    if BUILD_FILES:
        run()
Click here to get the complete project code