<?xml version="1.0" encoding="UTF-8" ?>
<rss
    version="2.0"
    xmlns:atom="http://www.w3.org/2005/Atom"
    xmlns:content="http://purl.org/rss/1.0/modules/content/"
    xmlns:webfeeds="http://webfeeds.org/rss/1.0"
    xmlns:media="http://search.yahoo.com/mrss/"
    >
    <channel>
        <title>Llama2 Tag - Viblo</title>
        <link>https://viblo.asia/rss</link>
        <description><![CDATA[Free service for technical knowledge sharing]]></description>
        <atom:link href="https://viblo.asia/rss/tags/llama2.rss" rel="self" type="application/rss+xml"></atom:link>
                <copyright>Sun* Inc.</copyright>
                                                <webfeeds:logo>https://viblo.asia/logo_full.svg</webfeeds:logo>
        <image>
            <url>https://viblo.asia/logo_full.svg</url>
            <title>Llama2 Tag - Viblo</title>
            <link>https://viblo.asia/rss</link>
        </image>
                                <language>vi-vn</language>
        <lastBuildDate>Wed, 08 Apr 2026 18:41:44 +0700</lastBuildDate>
                <item>
            <title><![CDATA[Text Summarization with LLM]]></title>
                        <link>https://viblo.asia/p/text-summarization-with-llm-MkNLrbxoLgA</link>
            <guid isPermaLink="true">https://viblo.asia/p/text-summarization-with-llm-MkNLrbxoLgA</guid>
            <description><![CDATA[Introduction
In my previous post, I have discussed about the summarization task in NLP, which is a very interesting topic. In this writing, I will imp...]]></description>
                        <dc:creator xmlns:dc="http://purl.org/dc/elements/1.1/">Lê Đức Minh</dc:creator>
            <pubDate>Sun, 25 Feb 2024 23:00:43 +0700</pubDate>
                                                                                                        </item>
                <item>
            <title><![CDATA[Tinh chỉnh Llama2 với LoRA cho bài toán hỏi-đáp (phần 2)]]></title>
                        <link>https://viblo.asia/p/tinh-chinh-llama2-voi-lora-cho-bai-toan-hoi-dap-phan-2-5OXLAP88JGr</link>
            <guid isPermaLink="true">https://viblo.asia/p/tinh-chinh-llama2-voi-lora-cho-bai-toan-hoi-dap-phan-2-5OXLAP88JGr</guid>
            <description><![CDATA[Phần 1 của bài viết ở đây
Thiết lập model và tokenizer
Tiếp theo chúng ta chọn một mô hình Llama 2 đã được huấn luyện trước (Llama-2-7b-chat-hf). Sau ...]]></description>
                        <dc:creator xmlns:dc="http://purl.org/dc/elements/1.1/">TungDT</dc:creator>
            <pubDate>Fri, 13 Oct 2023 10:19:43 +0700</pubDate>
                                                                                                        </item>
            </channel>
</rss>
